repo: freedomofpress/securedrop
pull_number: 3619
instance_id: freedomofpress__securedrop-3619
issue_numbers: ["1761"]
base_commit: 5fb212744a7cb93cd1d0bf842ad3a37fcafa92ba

patch:

```diff
diff --git a/securedrop/alembic/env.py b/securedrop/alembic/env.py --- a/securedrop/alembic/env.py +++ b/securedrop/alembic/env.py @@ -68,7 +68,8 @@ def run_migrations_online(): with connectable.connect() as connection: context.configure( connection=connection, - target_metadata=target_metadata + target_metadata=target_metadata, + render_as_batch=True ) with context.begin_transaction(): diff --git a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py @@ -0,0 +1,69 @@ +"""Create source UUID column + +Revision ID: 3d91d6948753 +Revises: faac8092c123 +Create Date: 2018-07-09 22:39:05.088008 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import quoted_name +import subprocess +import uuid + +# revision identifiers, used by Alembic. +revision = '3d91d6948753' +down_revision = 'faac8092c123' +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table('sources', 'sources_tmp') + + # Add UUID column. + op.add_column('sources_tmp', sa.Column('uuid', sa.String(length=36))) + + # Add UUIDs to sources_tmp table. + conn = op.get_bind() + sources = conn.execute(sa.text("SELECT * FROM sources_tmp")).fetchall() + + for source in sources: + id = source.id + source_uuid = str(uuid.uuid4()) + conn.execute( + sa.text("UPDATE sources_tmp SET uuid=('{}') WHERE id={}".format( + source_uuid, id))) + + # Now create new table with unique constraint applied. + op.create_table(quoted_name('sources', quote=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('filesystem_id', sa.String(length=96), nullable=True), + sa.Column('journalist_designation', sa.String(length=255), + nullable=False), + sa.Column('flagged', sa.Boolean(), nullable=True), + sa.Column('last_updated', sa.DateTime(), nullable=True), + sa.Column('pending', sa.Boolean(), nullable=True), + sa.Column('interaction_count', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid'), + sa.UniqueConstraint('filesystem_id') + ) + + # Data Migration: move all sources into the new table. + conn.execute(''' + INSERT INTO sources + SELECT id, uuid, filesystem_id, journalist_designation, flagged, + last_updated, pending, interaction_count + FROM sources_tmp + ''') + + # Now delete the old table. + op.drop_table('sources_tmp') + + +def downgrade(): + with op.batch_alter_table('sources', schema=None) as batch_op: + batch_op.drop_column('uuid') diff --git a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py @@ -0,0 +1,66 @@ +"""create submission uuid column + +Revision ID: fccf57ceef02 +Revises: 3d91d6948753 +Create Date: 2018-07-12 00:06:20.891213 + +""" +from alembic import op +import sqlalchemy as sa + +import uuid + +# revision identifiers, used by Alembic. +revision = 'fccf57ceef02' +down_revision = '3d91d6948753' +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table('submissions', 'submissions_tmp') + + # Add UUID column. 
+ op.add_column('submissions_tmp', sa.Column('uuid', sa.String(length=36))) + + # Add UUIDs to submissions_tmp table. + conn = op.get_bind() + submissions = conn.execute( + sa.text("SELECT * FROM submissions_tmp")).fetchall() + + for submission in submissions: + id = submission.id + submission_uuid = str(uuid.uuid4()) + conn.execute( + sa.text("""UPDATE submissions_tmp + SET uuid=('{}') + WHERE id={}""".format(submission_uuid, id))) + + # Now create new table with unique constraint applied. + op.create_table('submissions', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('source_id', sa.Integer(), nullable=True), + sa.Column('filename', sa.String(length=255), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('downloaded', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid') + ) + + # Data Migration: move all submissions into the new table. + conn.execute(''' + INSERT INTO submissions + SELECT id, uuid, source_id, filename, size, downloaded + FROM submissions_tmp + ''') + + # Now delete the old table. + op.drop_table('submissions_tmp') + + +def downgrade(): + with op.batch_alter_table('submissions', schema=None) as batch_op: + batch_op.drop_column('uuid') diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -193,6 +193,10 @@ def getkey(self, name): return key['fingerprint'] return None + def export_pubkey(self, name): + fingerprint = self.getkey(name) + return self.gpg.export_keys(fingerprint) + def encrypt(self, plaintext, fingerprints, output=None): # Verify the output path if output: diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- from datetime import datetime, timedelta -from flask import Flask, session, redirect, url_for, flash, g, request +from flask import (Flask, session, redirect, url_for, flash, g, request, + render_template) from flask_assets import Environment from flask_babel import gettext from flask_wtf.csrf import CSRFProtect, CSRFError from os import path +from werkzeug.exceptions import default_exceptions # type: ignore import i18n import template_filters @@ -13,7 +15,7 @@ from crypto_util import CryptoUtil from db import db -from journalist_app import account, admin, main, col +from journalist_app import account, admin, api, main, col from journalist_app.utils import get_source, logged_in from models import Journalist from store import Storage @@ -39,7 +41,7 @@ def create_app(config): app.config.from_object(config.JournalistInterfaceFlaskConfig) app.sdconfig = config - CSRFProtect(app) + csrf = CSRFProtect(app) Environment(app) if config.DATABASE_ENGINE == "sqlite": @@ -80,6 +82,18 @@ def handle_csrf_error(e): flash(msg, 'error') return redirect(url_for('main.login')) + def _handle_http_exception(error): + # Workaround for no blueprint-level 404/5 error handlers, see: + # https://github.com/pallets/flask/issues/503#issuecomment-71383286 + handler = app.error_handler_spec['api'][error.code].values()[0] + if request.path.startswith('/api/') and handler: + return handler(error) + + return render_template('error.html', error=error), error.code + + for code in default_exceptions: + app.errorhandler(code)(_handle_http_exception) + 
i18n.setup_app(config, app) app.jinja_env.trim_blocks = True @@ -97,17 +111,6 @@ def handle_csrf_error(e): template_filters.rel_datetime_format app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat - @app.template_filter('autoversion') - def autoversion_filter(filename): - """Use this template filter for cache busting""" - absolute_filename = path.join(config.SECUREDROP_ROOT, filename[1:]) - if path.exists(absolute_filename): - timestamp = str(path.getmtime(absolute_filename)) - else: - return filename - versioned_filename = "{0}?v={1}".format(filename, timestamp) - return versioned_filename - @app.before_request def setup_g(): """Store commonly used values in Flask's special g object""" @@ -130,8 +133,11 @@ def setup_g(): g.html_lang = i18n.locale_to_rfc_5646(g.locale) g.locales = i18n.get_locale2name() - if request.endpoint not in _insecure_views and not logged_in(): - return redirect(url_for('main.login')) + if request.path.split('/')[1] == 'api': + pass # We use the @token_required decorator for the API endpoints + else: # We are not using the API + if request.endpoint not in _insecure_views and not logged_in(): + return redirect(url_for('main.login')) if request.method == 'POST': filesystem_id = request.form.get('filesystem_id') @@ -144,5 +150,8 @@ def setup_g(): url_prefix='/account') app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin') app.register_blueprint(col.make_blueprint(config), url_prefix='/col') + api_blueprint = api.make_blueprint(config) + app.register_blueprint(api_blueprint, url_prefix='/api/v1') + csrf.exempt(api_blueprint) return app diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py new file mode 100644 --- /dev/null +++ b/securedrop/journalist_app/api.py @@ -0,0 +1,250 @@ +from datetime import datetime, timedelta +from functools import wraps +import hashlib +import json +from werkzeug.exceptions import default_exceptions # type: ignore + +from flask import abort, Blueprint, current_app, jsonify, request, send_file + +from db import db +from journalist_app import utils +from models import (Journalist, Reply, Source, Submission, + LoginThrottledException, InvalidUsernameException, + BadTokenException, WrongPasswordException) +from store import NotEncrypted + + +TOKEN_EXPIRATION_MINS = 60 * 8 + + +def get_user_object(request): + """Helper function to use in token_required views that need a user + object + """ + auth_token = request.headers.get('Authorization').split(" ")[1] + user = Journalist.validate_api_token_and_get_user(auth_token) + return user + + +def token_required(f): + @wraps(f) + def decorated_function(*args, **kwargs): + try: + auth_header = request.headers['Authorization'] + except KeyError: + return abort(403, 'API token not found in Authorization header.') + + if auth_header: + auth_token = auth_header.split(" ")[1] + else: + auth_token = '' + if not Journalist.validate_api_token_and_get_user(auth_token): + return abort(403, 'API token is invalid or expired.') + return f(*args, **kwargs) + return decorated_function + + +def get_or_404(model, object_id, column=''): + if column: + result = model.query.filter(column == object_id).one_or_none() + else: + result = model.query.get(object_id) + if result is None: + abort(404) + return result + + +def make_blueprint(config): + api = Blueprint('api', __name__) + + @api.route('/') + def get_endpoints(): + endpoints = {'sources_url': '/api/v1/sources', + 'current_user_url': '/api/v1/user', + 'submissions_url': '/api/v1/submissions', + 
'auth_token_url': '/api/v1/token'} + return jsonify(endpoints), 200 + + @api.route('/token', methods=['POST']) + def get_token(): + creds = json.loads(request.data) + + username = creds.get('username', None) + passphrase = creds.get('passphrase', None) + one_time_code = creds.get('one_time_code', None) + + if username is None: + return abort(400, 'username field is missing') + if passphrase is None: + return abort(400, 'passphrase field is missing') + if one_time_code is None: + return abort(400, 'one_time_code field is missing') + + try: + journalist = Journalist.login(username, passphrase, one_time_code) + token_expiry = datetime.utcnow() + timedelta( + seconds=TOKEN_EXPIRATION_MINS * 60) + response = jsonify({'token': journalist.generate_api_token( + expiration=TOKEN_EXPIRATION_MINS * 60), + 'expiration': token_expiry.isoformat() + 'Z'}) + + # Update access metadata + journalist.last_access = datetime.utcnow() + db.session.add(journalist) + db.session.commit() + + return response, 200 + except (LoginThrottledException, InvalidUsernameException, + BadTokenException, WrongPasswordException): + return abort(403, 'Token authentication failed.') + + @api.route('/sources', methods=['GET']) + @token_required + def get_all_sources(): + sources = Source.query.filter_by(pending=False).all() + return jsonify( + {'sources': [source.to_json() for source in sources]}), 200 + + @api.route('/sources/<source_uuid>', methods=['GET', 'DELETE']) + @token_required + def single_source(source_uuid): + if request.method == 'GET': + source = get_or_404(Source, source_uuid, column=Source.uuid) + return jsonify(source.to_json()), 200 + elif request.method == 'DELETE': + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.delete_collection(source.filesystem_id) + return jsonify({'message': 'Source and submissions deleted'}), 200 + + @api.route('/sources/<source_uuid>/add_star', methods=['POST']) + @token_required + def add_star(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.make_star_true(source.filesystem_id) + db.session.commit() + return jsonify({'message': 'Star added'}), 201 + + @api.route('/sources/<source_uuid>/remove_star', methods=['DELETE']) + @token_required + def remove_star(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.make_star_false(source.filesystem_id) + db.session.commit() + return jsonify({'message': 'Star removed'}), 200 + + @api.route('/sources/<source_uuid>/flag', methods=['POST']) + @token_required + def flag(source_uuid): + source = get_or_404(Source, source_uuid, + column=Source.uuid) + source.flagged = True + db.session.commit() + return jsonify({'message': 'Source flagged for reply'}), 200 + + @api.route('/sources/<source_uuid>/submissions', methods=['GET']) + @token_required + def all_source_submissions(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + return jsonify( + {'submissions': [submission.to_json() for + submission in source.submissions]}), 200 + + @api.route('/sources/<source_uuid>/submissions/<submission_uuid>/download', # noqa + methods=['GET']) + @token_required + def download_submission(source_uuid, submission_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + submission = get_or_404(Submission, submission_uuid, + column=Submission.uuid) + + # Mark as downloaded + submission.downloaded = True + db.session.commit() + + response = send_file(current_app.storage.path(source.filesystem_id, + submission.filename), + 
mimetype="application/pgp-encrypted", + as_attachment=True, + add_etags=False) # Disable Flask default ETag + + response.direct_passthrough = False + response.headers['Etag'] = '"sha256:{}"'.format( + hashlib.sha256(response.get_data()).hexdigest()) + return response + + @api.route('/sources/<source_uuid>/submissions/<submission_uuid>', + methods=['GET', 'DELETE']) + @token_required + def single_submission(source_uuid, submission_uuid): + if request.method == 'GET': + submission = get_or_404(Submission, submission_uuid, + column=Submission.uuid) + return jsonify(submission.to_json()), 200 + elif request.method == 'DELETE': + submission = get_or_404(Submission, submission_uuid, + column=Submission.uuid) + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.delete_file(source.filesystem_id, submission.filename, + submission) + return jsonify({'message': 'Submission deleted'}), 200 + + @api.route('/sources/<source_uuid>/reply', methods=['POST']) + @token_required + def post_reply(source_uuid): + source = get_or_404(Source, source_uuid, + column=Source.uuid) + if request.json is None: + abort(400, 'please send requests in valid JSON') + + if 'reply' not in request.json: + abort(400, 'reply not found in request body') + + user = get_user_object(request) + + data = json.loads(request.data) + if not data['reply']: + abort(400, 'reply should not be empty') + + source.interaction_count += 1 + try: + filename = current_app.storage.save_pre_encrypted_reply( + source.filesystem_id, + source.interaction_count, + source.journalist_filename, + data['reply']) + except NotEncrypted: + return jsonify( + {'message': 'You must encrypt replies client side'}), 400 + + reply = Reply(user, source, + current_app.storage.path(source.filesystem_id, filename)) + db.session.add(reply) + db.session.add(source) + db.session.commit() + return jsonify({'message': 'Your reply has been stored'}), 201 + + @api.route('/submissions', methods=['GET']) + @token_required + def get_all_submissions(): + submissions = Submission.query.all() + return jsonify({'submissions': [submission.to_json() for + submission in submissions]}), 200 + + @api.route('/user', methods=['GET']) + @token_required + def get_current_user(): + user = get_user_object(request) + return jsonify(user.to_json()), 200 + + def _handle_http_exception(error): + # Workaround for no blueprint-level 404/5 error handlers, see: + # https://github.com/pallets/flask/issues/503#issuecomment-71383286 + response = jsonify({'error': error.name, + 'message': error.description}) + + return response, error.code + + for code in default_exceptions: + api.errorhandler(code)(_handle_http_exception) + + return api diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -170,12 +170,16 @@ def download(zip_basename, submissions): as_attachment=True) +def delete_file(filesystem_id, filename, file_object): + file_path = current_app.storage.path(filesystem_id, filename) + worker.enqueue(srm, file_path) + db.session.delete(file_object) + db.session.commit() + + def bulk_delete(filesystem_id, items_selected): for item in items_selected: - item_path = current_app.storage.path(filesystem_id, item.filename) - worker.enqueue(srm, item_path) - db.session.delete(item) - db.session.commit() + delete_file(filesystem_id, item.filename, item) flash(ngettext("Submission deleted.", "{num} submissions deleted.".format( diff --git a/securedrop/models.py b/securedrop/models.py 
--- a/securedrop/models.py +++ b/securedrop/models.py @@ -8,6 +8,7 @@ import qrcode # Using svg because it doesn't require additional dependencies import qrcode.image.svg +import uuid # Find the best implementation available on this platform try: @@ -15,7 +16,8 @@ except ImportError: from StringIO import StringIO # type: ignore -from flask import current_app +from flask import current_app, url_for +from itsdangerous import TimedJSONWebSignatureSerializer, BadData from jinja2 import Markup from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship, backref @@ -49,6 +51,7 @@ def get_one_or_else(query, logger, failure_method): class Source(db.Model): __tablename__ = 'sources' id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) filesystem_id = Column(String(96), unique=True) journalist_designation = Column(String(255), nullable=False) flagged = Column(Boolean, default=False) @@ -69,6 +72,7 @@ class Source(db.Model): def __init__(self, filesystem_id=None, journalist_designation=None): self.filesystem_id = filesystem_id self.journalist_designation = journalist_designation + self.uuid = str(uuid.uuid4()) def __repr__(self): return '<Source %r>' % (self.journalist_designation) @@ -102,10 +106,49 @@ def collection(self): collection.sort(key=lambda x: int(x.filename.split('-')[0])) return collection + @property + def public_key(self): + return current_app.crypto_util.export_pubkey(self.filesystem_id) + + @public_key.setter + def public_key(self, value): + raise NotImplementedError + + @public_key.deleter + def public_key(self): + raise NotImplementedError + + def to_json(self): + docs_msg_count = self.documents_messages_count() + + json_source = { + 'uuid': self.uuid, + 'url': url_for('api.single_source', source_uuid=self.uuid), + 'journalist_designation': self.journalist_designation, + 'is_flagged': self.flagged, + 'is_starred': True if self.star else False, + 'last_updated': self.last_updated.isoformat() + 'Z', + 'interaction_count': self.interaction_count, + 'key': { + 'type': 'PGP', + 'public': self.public_key + }, + 'number_of_documents': docs_msg_count['documents'], + 'number_of_messages': docs_msg_count['messages'], + 'submissions_url': url_for('api.all_source_submissions', + source_uuid=self.uuid), + 'add_star_url': url_for('api.add_star', source_uuid=self.uuid), + 'remove_star_url': url_for('api.remove_star', + source_uuid=self.uuid), + 'reply_url': url_for('api.post_reply', source_uuid=self.uuid) + } + return json_source + class Submission(db.Model): __tablename__ = 'submissions' id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) source_id = Column(Integer, ForeignKey('sources.id')) source = relationship( "Source", @@ -119,12 +162,30 @@ class Submission(db.Model): def __init__(self, source, filename): self.source_id = source.id self.filename = filename + self.uuid = str(uuid.uuid4()) self.size = os.stat(current_app.storage.path(source.filesystem_id, filename)).st_size def __repr__(self): return '<Submission %r>' % (self.filename) + def to_json(self): + json_submission = { + 'source_url': url_for('api.single_source', + source_uuid=self.source.uuid), + 'submission_url': url_for('api.single_submission', + source_uuid=self.source.uuid, + submission_uuid=self.uuid), + 'filename': self.filename, + 'size': self.size, + 'is_read': self.downloaded, + 'uuid': self.uuid, + 'download_url': url_for('api.download_submission', + source_uuid=self.source.uuid, + submission_uuid=self.uuid), + } + return 
json_submission + class Reply(db.Model): __tablename__ = "replies" @@ -436,6 +497,28 @@ def login(cls, username, password, token): raise WrongPasswordException("invalid password") return user + def generate_api_token(self, expiration): + s = TimedJSONWebSignatureSerializer( + current_app.config['SECRET_KEY'], expires_in=expiration) + return s.dumps({'id': self.id}).decode('ascii') + + @staticmethod + def validate_api_token_and_get_user(token): + s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY']) + try: + data = s.loads(token) + except BadData: + return None + return Journalist.query.get(data['id']) + + def to_json(self): + json_user = { + 'username': self.username, + 'last_login': self.last_access.isoformat() + 'Z', + 'is_admin': self.is_admin + } + return json_user + class JournalistLoginAttempt(db.Model): diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -24,6 +24,13 @@ class PathException(Exception): pass +class NotEncrypted(Exception): + """An exception raised if a file expected to be encrypted client-side + is actually plaintext. + """ + pass + + class Storage: def __init__(self, storage_path, temp_dir, gpg_key): @@ -145,6 +152,21 @@ def save_file_submission(self, filesystem_id, count, journalist_filename, return encrypted_file_name + def save_pre_encrypted_reply(self, filesystem_id, count, + journalist_filename, content): + + if '-----BEGIN PGP MESSAGE-----' not in content.split('\n')[0]: + raise NotEncrypted + + encrypted_file_name = "{0}-{1}-reply.gpg".format(count, + journalist_filename) + encrypted_file_path = self.path(filesystem_id, encrypted_file_name) + + with open(encrypted_file_path, 'wb') as fh: + fh.write(content) + + return encrypted_file_path + def save_message_submission(self, filesystem_id, count, journalist_filename, message): filename = "{0}-{1}-msg.gpg".format(count, journalist_filename)
```

test_patch:

```diff
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -4,6 +4,7 @@ import logging import os import io +import json import psutil import pytest import shutil @@ -11,6 +12,8 @@ import subprocess from ConfigParser import SafeConfigParser +from flask import url_for +from pyotp import TOTP os.environ['SECUREDROP_ENV'] = 'test' # noqa from sdconfig import SDConfig, config as original_config @@ -160,10 +163,36 @@ def test_admin(journalist_app): def test_source(journalist_app): with journalist_app.app_context(): source, codename = utils.db_helper.init_source() - filesystem_id = source.filesystem_id return {'source': source, 'codename': codename, - 'filesystem_id': filesystem_id} + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid} + + [email protected](scope='function') +def test_submissions(journalist_app): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions} + + [email protected](scope='function') +def journalist_api_token(journalist_app, test_journo): + with journalist_app.test_client() as app: + valid_token = TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': test_journo['password'], + 'one_time_code': valid_token}), + headers=utils.api_helper.get_api_headers()) + observed_response = json.loads(response.data) + return observed_response['token'] def _start_test_rqworker(config): diff --git a/securedrop/tests/migrations/migration_3d91d6948753.py b/securedrop/tests/migrations/migration_3d91d6948753.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_3d91d6948753.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +import random +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_datetime, bool_or_none + +random.seed('ᕕ( ᐛ )ᕗ') + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. 
+ ''' + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + self.add_source() + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, journalist_designation, + flagged, last_updated, pending, interaction_count) + VALUES (:filesystem_id, :journalist_designation, :flagged, + :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + sources = db.engine.execute( + text('SELECT * FROM sources')).fetchall() + assert len(sources) == self.SOURCE_NUM + + for source in sources: + assert source.uuid is not None + + +class DowngradeTester(): + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + self.add_source() + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, uuid, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is now gone, but otherwise the table + has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM sources" + sources = db.engine.execute(text(sql)).fetchall() + + for source in sources: + try: + # This should produce an exception, as the column (should) + # be gone. 
+ assert source['uuid'] is None + except NoSuchColumnError: + pass + + assert len(sources) == self.SOURCE_NUM diff --git a/securedrop/tests/migrations/migration_fccf57ceef02.py b/securedrop/tests/migrations/migration_fccf57ceef02.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_fccf57ceef02.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- + +import random +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_datetime, bool_or_none + +random.seed('ᕕ( ᐛ )ᕗ') + + +def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, uuid, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. + ''' + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + add_source() + + for sid in range(1, self.SOURCE_NUM, 8): + for _ in range(random.randint(1, 3)): + self.add_submission(sid) + + # create "abandoned" submissions (issue #1189) + for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50): + self.add_submission(sid) + + db.session.commit() + + @staticmethod + def add_submission(source_id): + params = { + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (source_id, filename, size, + downloaded) + VALUES (:source_id, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + submissions = db.engine.execute( + text('SELECT * FROM submissions')).fetchall() + + for submission in submissions: + assert submission.uuid is not None + + +class DowngradeTester(): + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + add_source() + + for sid in range(1, self.SOURCE_NUM, 8): + for _ in range(random.randint(1, 3)): + self.add_submission(sid) + + # create "abandoned" submissions (issue #1189) + for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50): + self.add_submission(sid) + + db.session.commit() + + @staticmethod + def add_submission(source_id): + params = { + 'source_id': source_id, + 'uuid': str(uuid.uuid4()), + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (source_id, uuid, filename, size, + downloaded) + VALUES (:source_id, :uuid, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is 
now gone, but otherwise the table + has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM submissions" + submissions = db.engine.execute(text(sql)).fetchall() + + for submission in submissions: + try: + # This should produce an exception, as the column (should) + # be gone. + assert submission['uuid'] is None + except NoSuchColumnError: + pass diff --git a/securedrop/tests/test_alembic.py b/securedrop/tests/test_alembic.py --- a/securedrop/tests/test_alembic.py +++ b/securedrop/tests/test_alembic.py @@ -42,12 +42,56 @@ def downgrade(alembic_config, migration): def get_schema(app): with app.app_context(): - return list(db.engine.execute(text(''' + result = list(db.engine.execute(text(''' SELECT type, name, tbl_name, sql FROM sqlite_master ORDER BY type, name, tbl_name '''))) + return {(x[0], x[1], x[2]): x[3] for x in result} + + +def assert_schemas_equal(left, right): + for (k, v) in left.items(): + if k not in right: + raise AssertionError( + 'Left contained {} but right did not'.format(k)) + if not ddl_equal(v, right[k]): + raise AssertionError( + 'Schema for {} did not match:\nLeft:\n{}\nRight:\n{}' + .format(k, v, right[k])) + right.pop(k) + + if right: + raise AssertionError( + 'Right had additional tables: {}'.format(right.keys())) + + +def ddl_equal(left, right): + '''Check the "tokenized" DDL is equivalent because, because sometimes + Alembic schemas append columns on the same line to the DDL comes out + like: + + column1 TEXT NOT NULL, column2 TEXT NOT NULL + + and SQLAlchemy comes out: + + column1 TEXT NOT NULL, + column2 TEXT NOT NULL + ''' + # ignore the autoindex cases + if left is None and right is None: + return True + + left = [x for x in left.split('\n') if x] + right = [x for x in right.split('\n') if x] + + # Strip commas, whitespace, quotes + left = [x.replace("\"", "").replace(",", "").strip() for x in left] + right = [x.replace("\"", "").replace(",", "").strip() for x in right] + + return sorted(left) == sorted(right) + def test_alembic_head_matches_db_models(journalist_app, alembic_config, @@ -71,10 +115,10 @@ def test_alembic_head_matches_db_models(journalist_app, # The initial migration creates the table 'alembic_version', but this is # not present in the schema created by `db.create_all()`. - alembic_schema = list(filter(lambda x: x[2] != 'alembic_version', - alembic_schema)) + alembic_schema = {k: v for k, v in alembic_schema.items() + if k[2] != 'alembic_version'} - assert alembic_schema == models_schema + assert_schemas_equal(alembic_schema, models_schema) @pytest.mark.parametrize('migration', ALL_MIGRATIONS) @@ -124,10 +168,10 @@ def test_schema_unchanged_after_up_then_downgrade(alembic_config, # The initial migration is a degenerate case because it creates the table # 'alembic_version', but rolling back the migration doesn't clear it. 
if len(migrations) == 1: - reverted_schema = list(filter(lambda x: x[2] != 'alembic_version', - reverted_schema)) + reverted_schema = {k: v for k, v in reverted_schema.items() + if k[2] != 'alembic_version'} - assert reverted_schema == original_schema + assert_schemas_equal(reverted_schema, original_schema) @pytest.mark.parametrize('migration', ALL_MIGRATIONS) @@ -137,9 +181,9 @@ def test_upgrade_with_data(alembic_config, config, migration): # Degenerate case where there is no data for the first migration return - # Upgrade to one migration before the target - target = migrations[-1] - upgrade(alembic_config, target) + # Upgrade to one migration before the target stored in `migration` + last_migration = migrations[-2] + upgrade(alembic_config, last_migration) # Dynamic module import mod_name = 'tests.migrations.migration_{}'.format(migration) diff --git a/securedrop/tests/test_db.py b/securedrop/tests/test_db.py --- a/securedrop/tests/test_db.py +++ b/securedrop/tests/test_db.py @@ -4,10 +4,24 @@ from mock import MagicMock from utils import db_helper -from models import (Journalist, Submission, Reply, get_one_or_else, +from models import (Journalist, Submission, Reply, Source, get_one_or_else, LoginThrottledException) +def test_source_public_key_setter_unimplemented(journalist_app, test_source): + with journalist_app.app_context(): + source = Source.query.first() + with pytest.raises(NotImplementedError): + source.public_key = 'a curious developer tries to set a pubkey!' + + +def test_source_public_key_delete_unimplemented(journalist_app, test_source): + with journalist_app.app_context(): + source = Source.query.first() + with pytest.raises(NotImplementedError): + del source.public_key + + def test_get_one_or_else_returns_one(journalist_app, test_journo): with journalist_app.app_context(): # precondition: there must be one journalist diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_journalist_api.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +import hashlib +import json +import os + +from pyotp import TOTP + +from flask import current_app, url_for +from itsdangerous import TimedJSONWebSignatureSerializer + +from db import db +from models import Journalist, Reply, Source, SourceStar, Submission + +os.environ['SECUREDROP_ENV'] = 'test' # noqa +from utils.api_helper import get_api_headers + + +def test_unauthenticated_user_gets_all_endpoints(journalist_app): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_endpoints')) + + observed_endpoints = json.loads(response.data) + expected_endpoints = [u'current_user_url', u'submissions_url', + u'sources_url', u'auth_token_url'] + assert expected_endpoints == observed_endpoints.keys() + + +def test_valid_user_can_get_an_api_token(journalist_app, test_journo): + with journalist_app.test_client() as app: + valid_token = TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': test_journo['password'], + 'one_time_code': valid_token}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert isinstance(Journalist.validate_api_token_and_get_user( + observed_response['token']), Journalist) is True + assert response.status_code == 200 + + +def test_user_cannot_get_an_api_token_with_wrong_password(journalist_app, + test_journo): + with journalist_app.test_client() as app: + valid_token = 
TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': 'wrong password', + 'one_time_code': valid_token}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert response.status_code == 403 + assert observed_response['error'] == 'Forbidden' + + +def test_user_cannot_get_an_api_token_with_wrong_2fa_token(journalist_app, + test_journo): + with journalist_app.test_client() as app: + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': test_journo['password'], + 'one_time_code': '123456'}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert response.status_code == 403 + assert observed_response['error'] == 'Forbidden' + + +def test_user_cannot_get_an_api_token_with_no_passphase_field(journalist_app, + test_journo): + with journalist_app.test_client() as app: + valid_token = TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'one_time_code': valid_token}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert response.status_code == 400 + assert observed_response['error'] == 'Bad Request' + assert observed_response['message'] == 'passphrase field is missing' + + +def test_user_cannot_get_an_api_token_with_no_username_field(journalist_app, + test_journo): + with journalist_app.test_client() as app: + valid_token = TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'passphrase': test_journo['password'], + 'one_time_code': valid_token}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert response.status_code == 400 + assert observed_response['error'] == 'Bad Request' + assert observed_response['message'] == 'username field is missing' + + +def test_user_cannot_get_an_api_token_with_no_otp_field(journalist_app, + test_journo): + with journalist_app.test_client() as app: + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': test_journo['password']}), + headers=get_api_headers()) + observed_response = json.loads(response.data) + + assert response.status_code == 400 + assert observed_response['error'] == 'Bad Request' + assert observed_response['message'] == 'one_time_code field is missing' + + +def test_authorized_user_gets_all_sources(journalist_app, test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_sources'), + headers=get_api_headers(journalist_api_token)) + + data = json.loads(response.data) + + assert response.status_code == 200 + + # We expect to see our test source in the response + assert test_submissions['source'].journalist_designation == \ + data['sources'][0]['journalist_designation'] + + +def test_user_without_token_cannot_get_protected_endpoints(journalist_app, + test_submissions): + with journalist_app.app_context(): + uuid = test_submissions['source'].uuid + protected_routes = [ + url_for('api.get_all_sources'), + url_for('api.single_source', source_uuid=uuid), + url_for('api.all_source_submissions', source_uuid=uuid), + url_for('api.single_submission', source_uuid=uuid, + submission_uuid=test_submissions['submissions'][0].uuid), + url_for('api.download_submission', 
source_uuid=uuid, + submission_uuid=test_submissions['submissions'][0].uuid), + url_for('api.get_all_submissions'), + url_for('api.get_current_user') + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.get(protected_route, + headers=get_api_headers('')) + + assert response.status_code == 403 + + +def test_user_without_token_cannot_del_protected_endpoints(journalist_app, + test_submissions): + with journalist_app.app_context(): + uuid = test_submissions['source'].uuid + protected_routes = [ + url_for('api.single_source', source_uuid=uuid), + url_for('api.single_submission', source_uuid=uuid, + submission_uuid=test_submissions['submissions'][0].uuid), + url_for('api.remove_star', source_uuid=uuid), + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.delete(protected_route, + headers=get_api_headers('')) + + assert response.status_code == 403 + + +def test_attacker_cannot_create_valid_token_with_none_alg(journalist_app, + test_source, + test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + s = TimedJSONWebSignatureSerializer('not the secret key', + algorithm_name='none') + attacker_token = s.dumps({'id': test_journo['id']}).decode('ascii') + + response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(attacker_token)) + + assert response.status_code == 403 + + +def test_attacker_cannot_use_token_after_admin_deletes(journalist_app, + test_source, + journalist_api_token): + + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + + # In a scenario where an attacker compromises a journalist workstation + # the admin should be able to delete the user and their token should + # no longer be valid. + attacker = Journalist.validate_api_token_and_get_user( + journalist_api_token) + + db.session.delete(attacker) + db.session.commit() + + # Now this token should not be valid. 
+ response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 403 + + +def test_user_without_token_cannot_post_protected_endpoints(journalist_app, + test_source): + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.post_reply', source_uuid=uuid), + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid) + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.post(protected_route, + headers=get_api_headers('')) + assert response.status_code == 403 + + +def test_api_404(journalist_app, journalist_api_token): + with journalist_app.test_client() as app: + response = app.get('/api/v1/invalidendpoint', + headers=get_api_headers(journalist_api_token)) + json_response = json.loads(response.data) + + assert response.status_code == 404 + assert json_response['error'] == 'Not Found' + + +def test_trailing_slash_cleanly_404s(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.get(url_for('api.single_source', + source_uuid=uuid) + '/', + headers=get_api_headers(journalist_api_token)) + json_response = json.loads(response.data) + + assert response.status_code == 404 + assert json_response['error'] == 'Not Found' + + +def test_authorized_user_gets_single_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + data = json.loads(response.data) + assert data['uuid'] == test_source['source'].uuid + assert 'BEGIN PGP PUBLIC KEY' in data['key']['public'] + + +def test_get_non_existant_source_404s(journalist_app, journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.single_source', source_uuid=1), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 404 + + +def test_authorized_user_can_flag_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.flag', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Verify that the source was flagged. + assert Source.query.get(source_id).flagged + + +def test_authorized_user_can_star_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 201 + + # Verify that the source was starred. 
+ assert SourceStar.query.filter( + SourceStar.source_id == source_id).one().starred + + +def test_authorized_user_can_unstar_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 201 + + response = app.delete(url_for('api.remove_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + # Verify that the source is gone. + assert SourceStar.query.filter( + SourceStar.source_id == source_id).one().starred is False + + +def test_disallowed_methods_produces_405(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.delete(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + json_response = json.loads(response.data) + + assert response.status_code == 405 + assert json_response['error'] == 'Method Not Allowed' + + +def test_authorized_user_can_get_all_submissions(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_submissions'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + json_response = json.loads(response.data) + + observed_submissions = [submission['filename'] for + submission in json_response['submissions']] + + expected_submissions = [submission.filename for + submission in Submission.query.all()] + assert observed_submissions == expected_submissions + + +def test_authorized_user_get_source_submissions(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_submissions['source'].uuid + response = app.get(url_for('api.all_source_submissions', + source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + json_response = json.loads(response.data) + + observed_submissions = [submission['filename'] for + submission in json_response['submissions']] + + expected_submissions = [submission.filename for submission in + test_submissions['source'].submissions] + assert observed_submissions == expected_submissions + + +def test_authorized_user_can_get_single_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + response = app.get(url_for('api.single_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + json_response = json.loads(response.data) + + assert json_response['uuid'] == submission_uuid + assert json_response['is_read'] is False + assert json_response['filename'] == \ + test_submissions['source'].submissions[0].filename + assert json_response['size'] == \ + test_submissions['source'].submissions[0].size + + +def test_authorized_user_can_delete_single_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + response = 
app.delete(url_for('api.single_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Submission now should be gone. + assert Submission.query.filter( + Submission.uuid == submission_uuid).all() == [] + + +def test_authorized_user_can_delete_source_collection(journalist_app, + test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Source does not exist + assert Source.query.all() == [] + + +def test_authorized_user_can_download_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + + response = app.get(url_for('api.download_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Submission should now be marked as downloaded in the database + submission = Submission.query.get( + test_submissions['source'].submissions[0].id) + assert submission.downloaded + + # Response should be a PGP encrypted download + assert response.mimetype == 'application/pgp-encrypted' + + # Response should have Etag field with hash + assert response.headers['ETag'] == '"sha256:{}"'.format( + hashlib.sha256(response.data).hexdigest()) + + +def test_authorized_user_can_get_current_user_endpoint(journalist_app, + test_journo, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + json_response = json.loads(response.data) + assert json_response['is_admin'] is False + assert json_response['username'] == test_journo['username'] + + +def test_request_with_missing_auth_header_triggers_403(journalist_app): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }) + assert response.status_code == 403 + + +def test_request_with_auth_header_but_no_token_triggers_403(journalist_app): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers={ + 'Authorization': '', + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }) + assert response.status_code == 403 + + +def test_unencrypted_replies_get_rejected(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + reply_content = 'This is a plaintext reply' + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data=json.dumps({'reply': reply_content}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + source_id = test_source['source'].id + uuid = test_source['source'].uuid + + # First we must encrypt the reply, or it will get rejected + # by the server. 
+ source_key = current_app.crypto_util.getkey( + test_source['source'].filesystem_id) + reply_content = current_app.crypto_util.gpg.encrypt( + 'This is a plaintext reply', source_key).data + + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data=json.dumps({'reply': reply_content}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 201 + + with journalist_app.app_context(): # Now verify everything was saved. + # Get most recent reply in the database + reply = Reply.query.order_by(Reply.id.desc()).first() + + assert reply.journalist_id == test_journo['id'] + assert reply.source_id == source_id + + source = Source.query.get(source_id) + + expected_filename = '{}-{}-reply.gpg'.format( + source.interaction_count, source.journalist_filename) + + expected_filepath = current_app.storage.path( + source.filesystem_id, expected_filename) + + with open(expected_filepath, 'rb') as fh: + saved_content = fh.read() + + assert reply_content == saved_content + + +def test_reply_without_content_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data=json.dumps({'reply': ''}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_without_reply_field_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data=json.dumps({'other': 'stuff'}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_without_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data='invalid', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_with_valid_curly_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data='{}', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + json_response = json.loads(response.data) + assert json_response['message'] == 'reply not found in request body' + + +def test_reply_with_valid_square_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.post_reply', source_uuid=uuid), + data='[]', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + json_response = json.loads(response.data) + assert json_response['message'] == 'reply not found in request body' diff --git a/securedrop/tests/utils/api_helper.py b/securedrop/tests/utils/api_helper.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/utils/api_helper.py @@ -0,0 +1,11 @@ +def get_api_headers(token=''): + if token: + return { + 'Authorization': 'Token {}'.format(token), + 'Accept': 'application/json', + 'Content-Type': 'application/json' + } + return { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + } diff --git 
a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -146,6 +146,7 @@ def submit(source, num_submissions): submissions = [] for _ in range(num_submissions): source.interaction_count += 1 + source.pending = False fpath = current_app.storage.save_message_submission( source.filesystem_id, source.interaction_count, @@ -154,6 +155,7 @@ def submit(source, num_submissions): ) submission = models.Submission(source, fpath) submissions.append(submission) + db.session.add(source) db.session.add(submission) db.session.commit()
Journalist API

In order to prototype the journalist reading room application, we first need to create an API such that the SecureDrop app server can talk to clients. This API would not need to go to prod until we want to put the reading room application in beta or prod. The advantage here is that as we prototype, if we decide we don't like something, we can easily modify it.

Note: The API is intended to cover only the journalist actions for now, so there is no mention of the source interface actions or admin interface actions (e.g. creating users, etc.).

What follows is a sketch of a simple REST API that is able to do the main functions (downloading docs, browsing through sources, submissions, etc.). If you feel strongly that the eventual API should _not_ be something like this, please feel free to comment.

## Versioning

The API should be versioned such that if we decide to remove or modify functionality then we can do that and support new clients while preserving backwards compatibility. The base URL for the initial SecureDrop API will be: `/api/v1/`

## Content Type

```
'Accept': 'application/json',
'Content-Type': 'application/json'
```

## Authentication

`POST /api/v1/token` to get a token, with the username, password, and 2FA token in the request body. Thereafter, in order to authenticate to protected endpoints, send the token in the HTTP header `Authorization`:

`Authorization: Token myverylongtokengoeshere`

This header will be checked with each API request to see if it is valid and not yet expired.

# Endpoints

## Root Endpoint

Does not require authentication. The root endpoint describes the available resources:

`GET /api/v1/`

Response 200 (application/json):

```
{
  "current_user_url": "/api/v1/user/",
  "sources_url": "/api/v1/sources/",
  "submissions_url": "/api/v1/submissions/",
  "token_url": "/api/v1/token/"
}
```

## Source [`/sources`]

### GET `/sources`: get list of all sources

Requires authentication. Gives a list of all sources and data about them.

`GET /api/v1/sources/`

Response 200 (application/json):

```
{
  "sources": [
    {
      "add_star_url": "/api/v1/sources/1/add_star/",
      "flagged": false,
      "interaction_count": 2,
      "journalist_designation": "uneventful vaporization",
      "last_updated": "Tue, 30 May 2017 03:31:07 GMT",
      "number_of_documents": 0,
      "number_of_messages": 2,
      "remove_star_url": "/api/v1/sources/1/remove_star/",
      "reply_url": "/api/v1/sources/1/reply/",
      "source_id": 1,
      "submissions_url": "/api/v1/sources/1/submissions/",
      "url": "/api/v1/sources/1/"
    },
    {
      "add_star_url": "/api/v1/sources/2/add_star/",
      "flagged": false,
      "interaction_count": 1,
      "journalist_designation": "misanthropic widow",
      "last_updated": "Tue, 30 May 2017 03:31:17 GMT",
      "number_of_documents": 0,
      "number_of_messages": 1,
      "remove_star_url": "/api/v1/sources/2/remove_star/",
      "reply_url": "/api/v1/sources/2/reply/",
      "source_id": 2,
      "submissions_url": "/api/v1/sources/2/submissions/",
      "url": "/api/v1/sources/2/"
    }
  ]
}
```

### Individual Source [`/sources/<int:id>`]

Requires authentication. An object representing a single source.
`GET /api/v1/sources/<int:id>/`

Response 200 (application/json):

```
{
  "add_star_url": "/api/v1/sources/1/add_star/",
  "flagged": false,
  "interaction_count": 2,
  "journalist_designation": "uneventful vaporization",
  "last_updated": "Tue, 30 May 2017 03:31:07 GMT",
  "number_of_documents": 0,
  "number_of_messages": 2,
  "remove_star_url": "/api/v1/sources/1/remove_star/",
  "reply_url": "/api/v1/sources/1/reply/",
  "source_id": 1,
  "submissions_url": "/api/v1/sources/1/submissions/",
  "url": "/api/v1/sources/1/"
}
```

### Get all documents and messages associated with a Source [GET]

Requires authentication.

`GET /api/v1/sources/<int:source_id>/submissions`

Response 200 (application/json):

```
{
  "submissions": [
    {
      "download_url": "/api/v1/sources/1/submissions/1/download/",
      "filename": "1-uneventful_vaporization-msg.gpg",
      "is_read": true,
      "size": 592,
      "source_url": "/api/v1/sources/1/",
      "submission_id": 1,
      "submission_url": "/api/v1/sources/1/submissions/1/"
    },
    {
      "download_url": "/api/v1/sources/1/submissions/2/download/",
      "filename": "2-uneventful_vaporization-msg.gpg",
      "is_read": true,
      "size": 591,
      "source_url": "/api/v1/sources/1/",
      "submission_id": 2,
      "submission_url": "/api/v1/sources/1/submissions/2/"
    }
  ]
}
```

### Get a single submission or message associated with a Source [GET]

Requires authentication.

`GET /api/v1/sources/<int:source_id>/submissions/<int:submission_id>/`

Response 200 (application/json):

```
{
  "download_url": "/api/v1/sources/1/submissions/1/download/",
  "filename": "1-uneventful_vaporization-msg.gpg",
  "is_read": true,
  "size": 592,
  "source_url": "/api/v1/sources/1/",
  "submission_id": 1,
  "submission_url": "/api/v1/sources/1/submissions/1/"
}
```

### Add a reply [POST]

Requires authentication.

`POST /api/v1/sources/<int:id>/reply/`

Response 201 created (application/json):

```
{
  "message": "Your reply has been stored"
}
```

### Delete a submission [DELETE]

Requires authentication.

`DELETE /api/v1/sources/<int:source_id>/submissions/<int:submission_id>/`

Response 200:

```
{
  "message": "Submission deleted"
}
```

### Download a single submission [GET]

Requires authentication.

`GET /api/v1/sources/<int:source_id>/submissions/<int:submission_id>/download`

Response 200 has `Content-Type: application/pgp-encrypted`

### Delete a Source and all their associated submissions

Requires authentication. This is known as "delete collection".

`DELETE /api/v1/sources/<int:source_id>/submissions`

Response 200:

```
{
  "message": "Source and submissions deleted"
}
```

### Star a source

Requires authentication.

`POST /api/v1/sources/<int:source_id>/add_star/`

Response 201 created:

```
{
  "message": "Star added"
}
```

### Remove a star

Requires authentication.

`DELETE /api/v1/sources/<int:source_id>/remove_star/`

Response 200:

```
{
  "message": "Star removed"
}
```

## Submission [`/submissions`]

Requires authentication. Get all submissions.
`GET /api/v1/submissions/`

Response 200:

```
{
  "submissions": [
    {
      "download_url": "/api/v1/sources/1/submissions/1/download/",
      "filename": "1-uneventful_vaporization-msg.gpg",
      "is_read": true,
      "size": 592,
      "source_url": "/api/v1/sources/1/",
      "submission_id": 1,
      "submission_url": "/api/v1/sources/1/submissions/1/"
    },
    {
      "download_url": "/api/v1/sources/1/submissions/2/download/",
      "filename": "2-uneventful_vaporization-msg.gpg",
      "is_read": true,
      "size": 591,
      "source_url": "/api/v1/sources/1/",
      "submission_id": 2,
      "submission_url": "/api/v1/sources/1/submissions/2/"
    },
    {
      "download_url": "/api/v1/sources/2/submissions/3/download/",
      "filename": "1-misanthropic_widow-msg.gpg",
      "is_read": true,
      "size": 589,
      "source_url": "/api/v1/sources/2/",
      "submission_id": 3,
      "submission_url": "/api/v1/sources/2/submissions/3/"
    }
  ]
}
```

## User [`/user/`]

Requires authentication. An object representing the current user.

`GET /api/v1/user/`

Response 200:

```
{
  "user": {
    "is_admin": true,
    "last_login": "Tue, 30 May 2017 03:30:21 GMT",
    "username": "test"
  }
}
```

## TODOS

Other actions that journalists can perform, which I have not specified yet but which should be in an eventual journalist API:

- [ ] Password and 2FA change for the current user
- [ ] Bulk downloading of submissions
- [ ] Change codename

To debate:

- [ ] Filter sources by string (this I think could potentially just be done on the client side, or we can implement it in the API; no major feelings either way)

Comment if you have major thoughts on this so far. One other thought is that we could implement a logout endpoint to blacklist tokens (if we want to enable a user to invalidate a token before it expires).

## Implementation

I've implemented this as written in the branch `journalist-api` if you want to play around with it. Unit tests for the API are in `securedrop/tests/test_api.py`.
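To make the authentication flow above concrete, here is a minimal client sketch using the `requests` library. The dev-server address, the field names in the token request body, and the `token` key in the response are illustrative assumptions, not part of the spec above:

```
import json

import requests  # assumed to be available in the client environment

API_ROOT = 'http://127.0.0.1:8081/api/v1'  # hypothetical dev server address

# Request a token; the exact body field names are an assumption here.
resp = requests.post(
    API_ROOT + '/token',
    headers={'Accept': 'application/json',
             'Content-Type': 'application/json'},
    data=json.dumps({'username': 'journalist',
                     'passphrase': 'correct horse battery staple',
                     'one_time_code': '123456'}))
token = resp.json()['token']  # assumes the response carries a 'token' key

# Every subsequent request to a protected endpoint carries the token.
auth_headers = {'Authorization': 'Token {}'.format(token),
                'Accept': 'application/json',
                'Content-Type': 'application/json'}
sources = requests.get(API_ROOT + '/sources/', headers=auth_headers).json()
```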
Is https://github.com/freedomofpress/securedrop/commits/journalist-api up to date? Or are there unpushed commits somewhere?

Nope, everything that has been implemented thus far is in that branch. But let me know if something looks like it is missing.

A minor question (no major thought just yet ;-) In

`GET /api/v1/sources/<int:source_id>/submissions`

what is the rationale for having `{ "submissions": [ ... ] }` instead of `[]`?

We might not want to expose the primary key of Sources through the JAPI. It can indicate how many Sources have used the system, or when in relation to other Sources a particular Source was created. Especially in the case where an adversary regularly creates Source accounts and then compromises a non-admin Journalist account, this has the potential to leak de-anonymizing time-related info that would otherwise not be available. This is not something I have seen brought up before, and it should probably factor into our re-write of the threat model document as well.

Some things to consider:

1. Schema/input validation. It looks like we don't have any plans in this regard yet. We have to be careful with input processing, even if there are a limited number of datatypes supported by JSON. There's only a draft spec for [JSON schema](http://json-schema.org/documentation.html), otherwise I might suggest we consider it.
2. (Re-iterating my last comment.) Compromise of a Journalist's account should not be completely outside of our threat model. We should at least do everything we can to protect deleted sources and inactive sources still in the system, even if there's little we can do to protect the data and metadata of active and especially new sources. This has to factor into the API architecture; non-admin journos need to be on a need-to-know basis with regards to metadata and any information that might provide little value to an honest journalist, but great value to an adversary trying to de-anonymize sources.
3. JSON parsing is problematic (see http://seriot.ch/parsing_json.php and http://seriot.ch/json/parsing.html#43).
4. How can we update the API over time, allowing for slight skew in the upgrade status of clients and the server?
5. Consider protocol buffers or another mature, verifiable, schematic serialization solution. In addition to providing a solution to 1, 3, and 4, protocol buffers also conserve bandwidth (especially important to the journalist experience given they're communicating with an onion service--a 15-hop round trip).

Most of these points are fine to ignore for the purpose of prototyping, but should be addressed by a production-ready system.

Quick note: since some news organizations are interested in using an API on the journalist interface for developing custom software for their organization's SecureDrop, it should stay JSON for simplicity and should be well-documented for other developers (thought: news organizations with the engineering time and interest to develop software for SecureDrop internally might be interested in upstreaming some of their improvements such that the wider SecureDrop ecosystem can benefit).

@redshiftzero and I would like to consider work on this for the next two-week FPF development sprint (kick-off 4/4), but it would be good to first develop more consensus on what is achievable/desirable:

- within a couple of weeks
- by the 0.7 release (due May 8)
- after the 0.7 release.

My take: It sounds like we have a solid prototype in the `journalist-api` branch that will require extensive pen-testing and hardening to be production-ready.
If we're comfortable that the core features of the API may expand but are unlikely to change dramatically, breaking this pen-testing and hardening down into tasks we can handle for the next sprint seems like the most logical near-term step to me.

As for shipping this with 0.7, I'm curious how folks feel about an off-by-default approach, provided that we do feel the code is production-ready at release time. This would speak to Jen's use case above:

> some news organizations are interested in using an API on the journalist interface for developing custom software for their organization's SecureDrop

...and to near-term work with selected news organizations on testing the alpha releases of the Qubes OS SD Workstation that will consume this API. "Off-by-default" would not mean "this is not production-ready". The only reason to not enable it for everyone would be to minimize the attack surface for users who don't actually intend to use it yet. Thoughts?

I think some reasonable goals for the next sprint might be:

- [ ] Rebase the branch on current `develop` (this was written pre-Blueprint refactor and a bunch of other major changes to the app code)
- [ ] Critically examine the unit test suite: are at least all the base cases covered?
- [ ] (Stretch) Security evaluation of the API, filing followup tickets (running through [this](https://www.owasp.org/index.php/REST_Security_Cheat_Sheet) is a good start)

In terms of having this feature be off by default: if we did that, I imagine we'd do something like this:

1. Manage a config option for enabling the API in `site-specific`
2. Administrators would need to run `./securedrop-admin sdconfig` and `./securedrop-admin install` to turn this option on

I think this is actually a worse workflow from the user perspective than simply adding an API via the `securedrop-app-code` deb package, which requires no admin involvement. So I suggest that to make this decision we should:

1. Evaluate the security of the API (cc @emkll 😇) and, referencing the threat model, determine how much additional risk there really is from a feature like this.
2. IF we determine that there is significant additional risk, we (+ SecureDrop UX team) talk to some admins and see what users think. Do they want to be able to turn off the API? Even though this introduces admin maintenance burden when they have a journalist that wants to start using the Qubes workstation?

Generally I'm of the opinion that for the most part, it's our job to evaluate the risk and make application architecture decisions that minimize risk to users overall, but that said - if users do want more flexibility in controlling the application, then we should respect that.

PS: I agree with the earlier comment in this thread about not exposing primary keys via the API (we should use either the `journalist_designation` or the `filesystem_id`, as we do in the existing web application), so that's one additional change to make.

I agree with not exposing the primary key. You can find another good list of recommendations [here](https://github.com/shieldfy/API-Security-Checklist/blob/master/README.md).

Here are some initial thoughts on this, after taking a quick glance at the journalist-api branch and the spec @redshiftzero described above.

First, areas where risks could be reduced:

- For a client, using an API at the command line would eliminate browser-based attacks, though a significantly larger risk is malware submission through SecureDrop to compromise this same computer. Therefore, benefits are limited for Journalists.
  However, they would be more significant for Sources, if we were to implement a Source API.
- Using a client can also provide the opportunity and control to implement new security features:
  - Certificate pinning and SSL/TLS for the Journalist interface.
  - Client certificates to authenticate clients, for an added layer of channel authentication/encryption.

Here are the areas where I think incremental risk is introduced:

- We will be developing/maintaining sensitive (though fairly simple) parts of the code, specifically authentication, which can increase the risk of developer error.
- The API would expose a duplicate set of functionality, and it may be difficult to use the API for the web application (due to limited use of JavaScript), increasing attack surface, probability of developer error, and maintenance burden (client certs would be a great way to mitigate this).
- Much of this incremental risk will also depend on the client application. Modern browsers have sandboxing capabilities, which will likely not be present in more lightweight/native UI frameworks. For that reason, I think that the client directly handling decrypted, un-sanitized submissions is out of the question, and that sanitization should occur outside the domain of the client application.
- It is critical we use 2-factor authentication and fairly short sessions, given the upcoming SecureDrop workstation use case, where we are introducing persistence to the journalist environment.

There would be other ways to implement authentication, but I think the method described above makes the most sense:

- OAuth - definitely overkill, as we won't use federation and it's quite a complex protocol.
- Signed requests: using an asymmetric key pair to sign your API request. Key management might be a bit of a challenge, but it could be an interesting incremental improvement to the authentication header approach.
2018-06-29T20:45:17Z
[]
[]
freedomofpress/securedrop
3,648
freedomofpress__securedrop-3648
[ "3635" ]
10e32435714e5595df09146fcaf9baa0f510932a
diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -49,7 +49,7 @@ class Source(db.Model): filesystem_id = Column(String(96), unique=True) journalist_designation = Column(String(255), nullable=False) flagged = Column(Boolean, default=False) - last_updated = Column(DateTime, default=datetime.datetime.utcnow) + last_updated = Column(DateTime) star = relationship("SourceStar", uselist=False, backref="source") # sources are "pending" and don't get displayed to journalists until they
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -2,6 +2,7 @@ """Testing utilities that involve database (and often related filesystem) interaction. """ +import datetime import mock import os @@ -143,6 +144,8 @@ def submit(source, num_submissions): :returns: A list of the :class:`models.Submission`s submitted. """ assert num_submissions >= 1 + source.last_updated = datetime.datetime.utcnow() + db.session.add(source) submissions = [] for _ in range(num_submissions): source.interaction_count += 1
journalist notification incorrectly claims submissions were received

## Description

A journalist notification mail is sent claiming a submission was received in the past 24h. But after checking the journalist interface, there is nothing.

## Steps to Reproduce

* drain the server's entropy pool
* as a new source, submit a document and check that the source is flagged because no new key was created
* wait 24h and see that a journalist notification is mailed because there is a new submission
* log back in as the source using the same passphrase and check that the source is no longer flagged
* wait 24h and see that a journalist notification is mailed although there is no new submission

## Expected Behavior

* no journalist notification is sent when a source key generation is delayed

## Actual Behavior

* a journalist notification is sent when a source key generation is delayed
> It could be because a source removed a message, and that changes the last updated field.

But no, because `securedrop/source_app/main.py def delete():` does not change `last_updated`.

`securedrop/source_app/utils.py async_genkey` changes `last_updated`, so the following can happen:

* A source makes a submission but is flagged for reply because there is not enough entropy
* A few days later the source comes back and uses its passphrase
* The `securedrop/source_app/main.py lookup` method calls `async_genkey` to generate the key and sets `last_updated`
* The generation of the key is mistaken by `securedrop/manage.py were_there_submissions_today` to mean a new submission happened

Yesterday there was a case of a directory in /var/lib/securedrop/store that was created and left empty. There seems to be a condition where that can happen, but I wonder when...

**dang**

When a source is created, the **last_updated** field is set to the date of creation:

```
last_updated = Column(DateTime, default=datetime.datetime.utcnow)
```

and since a new entry is created every time someone clicks on **Submit Documents** and goes through `/generate`, it counts as activity, which is mistaken for a submission.

I propose the `last_updated` date is left unset at creation time. Since the source is `pending` in this case, it does not show at all in the journalist interface, and the `last_updated` field does not need to reflect the date at which the source was created. Thoughts?
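To illustrate the failure mode, here is a minimal sketch (not the actual `manage.py` code, and it must run inside the Flask app context) of the kind of check that gets fooled: it infers "a submission happened" purely from `Source.last_updated`, which `async_genkey` and the creation default also touch:

```
import datetime

from models import Source  # SecureDrop's SQLAlchemy source model

def were_there_submissions_today():
    # Any source whose last_updated moved within the window counts,
    # even when the update came from delayed key generation or from
    # account creation rather than from an actual submission.
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
    return Source.query.filter(Source.last_updated > cutoff).count() > 0
```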
2018-07-20T17:04:58Z
[]
[]
freedomofpress/securedrop
3,652
freedomofpress__securedrop-3652
[ "3651" ]
7dbd5ef4b7c9d3a564074a5d2a49363a20196606
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -65,6 +65,21 @@ def get_endpoints(): 'auth_token_url': '/api/v1/token'} return jsonify(endpoints), 200 + # Before every post, we validate the payload before processing the request + @api.before_request + def validate_data(): + if request.method == 'POST': + # flag and star can have empty payloads + if not request.data: + if ('flag' not in request.path and 'star' not in request.path): + return abort(400, 'malformed request') + # other requests must have valid JSON payload + else: + try: + json.loads(request.data) + except (ValueError): + return abort(400, 'malformed request') + @api.route('/token', methods=['POST']) def get_token(): creds = json.loads(request.data)
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -228,7 +228,8 @@ def test_user_without_token_cannot_post_protected_endpoints(journalist_app, with journalist_app.test_client() as app: for protected_route in protected_routes: response = app.post(protected_route, - headers=get_api_headers('')) + headers=get_api_headers(''), + data=json.dumps({'some': 'stuff'})) assert response.status_code == 403 @@ -594,3 +595,66 @@ def test_reply_with_valid_square_json_400(journalist_app, journalist_api_token, json_response = json.loads(response.data) assert json_response['message'] == 'reply not found in request body' + + +def test_malformed_json_400(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.get_token'), + url_for('api.post_reply', source_uuid=uuid), + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="{this is invalid {json!", + headers=get_api_headers(journalist_api_token)) + observed_response = json.loads(response.data) + + assert response.status_code == 400 + assert observed_response['error'] == 'Bad Request' + + +def test_empty_json_400(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.get_token'), + url_for('api.post_reply', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="", + headers=get_api_headers(journalist_api_token)) + observed_response = json.loads(response.data) + + assert response.status_code == 400 + assert observed_response['error'] == 'Bad Request' + + +def test_empty_json_20X(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="", + headers=get_api_headers(journalist_api_token)) + + assert response.status_code in (200, 201)
Journalist API returns error 500 when JSON is malformed or absent

## Description

## Steps to Reproduce

1. `make dev`
2. `curl -X POST 127.0.0.1:8081/api/v1/token`

## Expected Behavior

The server should return an error 400 stating that the request is invalid.

## Actual Behavior

The server returns an error 500 stating that there is a server error.
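A minimal sketch of the guard that fixes this, mirroring the diff above: validate POST bodies in a `before_request` hook on the API blueprint (named `api` in the app code) so a 400 is returned before `json.loads` in a view can blow up into a 500. Note that the star/flag endpoints legitimately accept empty bodies:

```
import json

from flask import abort, request

@api.before_request  # `api` is the journalist API blueprint
def validate_data():
    if request.method == 'POST':
        if not request.data:
            # flag and star can have empty payloads
            if 'flag' not in request.path and 'star' not in request.path:
                abort(400, 'malformed request')
        else:
            # other requests must carry a valid JSON payload
            try:
                json.loads(request.data)
            except ValueError:
                abort(400, 'malformed request')
```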
2018-07-24T18:49:30Z
[]
[]
freedomofpress/securedrop
3,667
freedomofpress__securedrop-3667
[ "3666" ]
550da6a4a3ab649b9bd4d0bc8bc2be4740daf700
diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -123,12 +123,17 @@ def to_json(self): else: last_updated = datetime.datetime.utcnow().isoformat() + 'Z' + if self.star and self.star.starred: + starred = True + else: + starred = False + json_source = { 'uuid': self.uuid, 'url': url_for('api.single_source', source_uuid=self.uuid), 'journalist_designation': self.journalist_designation, 'is_flagged': self.flagged, - 'is_starred': True if self.star else False, + 'is_starred': starred, 'last_updated': last_updated, 'interaction_count': self.interaction_count, 'key': {
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -306,6 +306,12 @@ def test_authorized_user_can_star_a_source(journalist_app, test_source, assert SourceStar.query.filter( SourceStar.source_id == source_id).one().starred + # API should also report is_starred is true + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + json_response = json.loads(response.data) + assert json_response['is_starred'] is True + def test_authorized_user_can_unstar_a_source(journalist_app, test_source, journalist_api_token): @@ -324,6 +330,12 @@ def test_authorized_user_can_unstar_a_source(journalist_app, test_source, assert SourceStar.query.filter( SourceStar.source_id == source_id).one().starred is False + # API should also report is_starred is false + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + json_response = json.loads(response.data) + assert json_response['is_starred'] is False + def test_disallowed_methods_produces_405(journalist_app, test_source, journalist_api_token):
journalist API: incorrect `is_starred` reported when starring and unstarring a source

## Description

Bug found by @kushaldas over in #3619:

> In my test, I removed a star from a source, I got back this reply: {'message': 'Star removed'}. After this when I am trying to get all the sources or that particular source again, I can still see 'is_starred': True for that source.

## Steps to Reproduce

Note: you'll need to put in the UUID of a source in your dev env in place of the one shown here.

1. Get a source via the API: GET `127.0.0.1:8081/api/v1/sources/917c3b91-29f9-46a0-8e3b-0ba6af5fc9f5`
2. Star the source: POST `127.0.0.1:8081/api/v1/sources/917c3b91-29f9-46a0-8e3b-0ba6af5fc9f5/add_star`
3. Unstar the source: DELETE `127.0.0.1:8081/api/v1/sources/917c3b91-29f9-46a0-8e3b-0ba6af5fc9f5/remove_star`
4. Get the source once more: GET `127.0.0.1:8081/api/v1/sources/917c3b91-29f9-46a0-8e3b-0ba6af5fc9f5`

## Expected Behavior

`is_starred: False`

## Actual Behavior

`is_starred: True`

## Comments

A simple fix; it needs at minimum an additional assert in tests to cover this case.
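For reference, the root cause as corrected in the diff above: `Source.to_json` treated the mere existence of a `SourceStar` row as "starred", but unstarring keeps the row around with `starred=False`, so both the relationship and the flag must be checked:

```
# Inside Source.to_json() -- a SourceStar row may still exist with
# starred=False after an unstar, so test the flag, not just the row.
if self.star and self.star.starred:
    starred = True
else:
    starred = False
```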
2018-07-27T20:36:38Z
[]
[]
freedomofpress/securedrop
3,672
freedomofpress__securedrop-3672
[ "3671" ]
a4c0252be752fe48224cd080559a1b66e051b761
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -3,6 +3,8 @@ import datetime import os +import sys +import argparse from sqlalchemy.exc import IntegrityError os.environ["SECUREDROP_ENV"] = "dev" # noqa @@ -74,10 +76,19 @@ def create_source_and_submissions(num_submissions=2): test_password = "correct horse battery staple profanity oil chewy" test_otp_secret = "JHCOGO7VCER3EJ4L" + parser = argparse.ArgumentParser() + parser.add_argument("--staging", help="Adding user for staging tests.", + action="store_true") + args = parser.parse_args() add_test_user("journalist", test_password, test_otp_secret, is_admin=True) + + # If staging, we only need the journalist user (admin) + if args.staging: + sys.exit(0) + add_test_user("dellsberg", test_password, test_otp_secret,
diff --git a/securedrop/tests/functional/README.md b/securedrop/tests/functional/README.md --- a/securedrop/tests/functional/README.md +++ b/securedrop/tests/functional/README.md @@ -3,7 +3,7 @@ - `sudo -u www-data bash` - `cd /var/wwww/securedrop/` - `./manage.py reset` # This will clean the DB for testing -- `./create-demo-user.py` +- `./create-dev-data.py --staging` Update this information to the `tests/functional/instance_information.json file.
[functional testing] create-dev-data.py should get one user for staging-based testing

## Description

`create-dev-data.py` should get a `--staging` flag to create only one user, which can be used by Tor-based functional tests.

Part of #3488

## User Stories

I am a SecureDrop developer and I want an easy way to add one user for functional tests in my staging or production instance.
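A minimal sketch of the flag handling, matching the diff above (with `add_test_user` and the test credentials already defined earlier in the script): with `--staging`, only the single admin journalist needed by the Tor-based functional tests is created, and the script exits before adding the extra dev-only users and sample data:

```
import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument("--staging", help="Adding user for staging tests.",
                    action="store_true")
args = parser.parse_args()

add_test_user("journalist", test_password, test_otp_secret, is_admin=True)

# If staging, the journalist user (admin) is all we need.
if args.staging:
    sys.exit(0)
```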
2018-07-31T17:03:49Z
[]
[]
freedomofpress/securedrop
3,688
freedomofpress__securedrop-3688
[ "3674" ]
a761f5b20c46ae643ed37bf4c2d1c1210c70f105
diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -67,18 +67,21 @@ def process(): return method(cols_selected) @view.route('/<filesystem_id>/<fn>') - def download_single_submission(filesystem_id, fn): - """Sends a client the contents of a single submission.""" + def download_single_file(filesystem_id, fn): + """Sends a client the contents of a single file, either a submission + or a journalist reply""" if '..' in fn or fn.startswith('/'): abort(404) - try: - Submission.query.filter( - Submission.filename == fn).one().downloaded = True - db.session.commit() - except NoResultFound as e: - current_app.logger.error( - "Could not mark " + fn + " as downloaded: %s" % (e,)) + # only mark as read when it's a submission (and not a journalist reply) + if not fn.endswith('reply.gpg'): + try: + Submission.query.filter( + Submission.filename == fn).one().downloaded = True + db.session.commit() + except NoResultFound as e: + current_app.logger.error( + "Could not mark " + fn + " as downloaded: %s" % (e,)) return send_file(current_app.storage.path(filesystem_id, fn), mimetype="application/pgp-encrypted")
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1259,7 +1259,7 @@ def test_admin_page_restriction_http_posts(journalist_app, test_journo): def test_user_authorization_for_gets(journalist_app): urls = [url_for('main.index'), url_for('col.col', filesystem_id='1'), - url_for('col.download_single_submission', + url_for('col.download_single_file', filesystem_id='1', fn='1'), url_for('account.edit')] @@ -1344,6 +1344,26 @@ def test_passphrase_migration_on_reset(journalist_app): assert journalist.valid_password(VALID_PASSWORD) +def test_journalist_reply_view(journalist_app, test_source, test_journo): + source, _ = utils.db_helper.init_source() + journalist, _ = utils.db_helper.init_journalist() + submissions = utils.db_helper.submit(source, 1) + replies = utils.db_helper.reply(journalist, source, 1) + + subm_url = url_for('col.download_single_file', + filesystem_id=submissions[0].source.filesystem_id, + fn=submissions[0].filename) + reply_url = url_for('col.download_single_file', + filesystem_id=replies[0].source.filesystem_id, + fn=replies[0].filename) + + with journalist_app.test_client() as app: + resp = app.get(subm_url) + assert resp.status_code == 302 + resp = app.get(reply_url) + assert resp.status_code == 302 + + class TestJournalistApp(TestCase): # A method required by flask_testing.TestCase
[reply refactor] Allow journalists to download replies from journalist interface

After #3673 is implemented, we should allow journalists to download replies from the journalist interface UI.

Note that for long-running SecureDrop instances, there will be old replies encrypted only to the source key, which should be unavailable for download.

Epic: #3097
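A sketch of how the merged change in the diff above handles this in the collection view: the single-file download route serves both submissions and journalist replies, but only submissions are marked as downloaded, since `reply.gpg` files have no row in the submissions table. (`Submission`, `db`, `send_file`, `NoResultFound`, and `current_app` come from the view module's existing imports.)

```
# Inside col.download_single_file(filesystem_id, fn):
# replies are served like submissions, but never marked as read.
if not fn.endswith('reply.gpg'):
    try:
        Submission.query.filter(
            Submission.filename == fn).one().downloaded = True
        db.session.commit()
    except NoResultFound as e:
        current_app.logger.error(
            "Could not mark " + fn + " as downloaded: %s" % (e,))
return send_file(current_app.storage.path(filesystem_id, fn),
                 mimetype="application/pgp-encrypted")
```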
2018-08-02T17:25:09Z
[]
[]
freedomofpress/securedrop
3,690
freedomofpress__securedrop-3690
[ "3675" ]
1a8a43a9880827e474c6045fd9a564cc46961f7e
diff --git a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py @@ -0,0 +1,66 @@ +"""add column to track source deletion of replies + +Revision ID: e0a525cbab83 +Revises: 2d0ce3ee5bdc +Create Date: 2018-08-02 00:07:59.242510 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e0a525cbab83' +down_revision = '2d0ce3ee5bdc' +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table('replies', 'replies_tmp') + + # Add new column. + op.add_column('replies_tmp', + sa.Column('deleted_by_source', sa.Boolean())) + + # Populate deleted_by_source column in replies_tmp table. + conn = op.get_bind() + replies = conn.execute( + sa.text("SELECT * FROM replies_tmp")).fetchall() + + for reply in replies: + id = reply.id + conn.execute( + sa.text("""UPDATE replies_tmp + SET deleted_by_source=('0') + WHERE id={}""".format(id))) + + # Now create new table with not null constraint applied to + # deleted_by_source. + op.create_table('replies', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('journalist_id', sa.Integer(), nullable=True), + sa.Column('source_id', sa.Integer(), nullable=True), + sa.Column('filename', sa.String(length=255), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('deleted_by_source', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), + sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), + sa.PrimaryKeyConstraint('id') + ) + + # Data Migration: move all replies into the new table. + conn.execute(''' + INSERT INTO replies + SELECT id, journalist_id, source_id, filename, size, deleted_by_source + FROM replies_tmp + ''') + + # Now delete the old table. 
+ op.drop_table('replies_tmp') + + +def downgrade(): + with op.batch_alter_table('replies', schema=None) as batch_op: + batch_op.drop_column('deleted_by_source') diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -217,6 +217,8 @@ class Reply(db.Model): filename = Column(String(255), nullable=False) size = Column(Integer, nullable=False) + deleted_by_source = Column(Boolean, default=False, nullable=False) + def __init__(self, journalist, source, filename): self.journalist_id = journalist.id self.source_id = source.id diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -10,7 +10,6 @@ from db import db from models import Source, Submission, Reply, get_one_or_else -from rm import srm from source_app.decorators import login_required from source_app.utils import (logged_in, generate_unique_codename, async_genkey, normalize_timestamps, @@ -76,7 +75,10 @@ def create(): @login_required def lookup(): replies = [] - for reply in g.source.replies: + source_inbox = Reply.query.filter(Reply.source_id == g.source.id) \ + .filter(Reply.deleted_by_source == False).all() # noqa + + for reply in source_inbox: reply_path = current_app.storage.path( g.filesystem_id, reply.filename, @@ -203,11 +205,16 @@ def submit(): @view.route('/delete', methods=('POST',)) @login_required def delete(): + """This deletes the reply from the source's inbox, but preserves + the history for journalists such that they can view conversation + history. + """ + query = Reply.query.filter( Reply.filename == request.form['reply_filename']) reply = get_one_or_else(query, current_app.logger, abort) - srm(current_app.storage.path(g.filesystem_id, reply.filename)) - db.session.delete(reply) + reply.deleted_by_source = True + db.session.add(reply) db.session.commit() flash(gettext("Reply deleted"), "notification") @@ -216,15 +223,16 @@ def delete(): @view.route('/delete-all', methods=('POST',)) @login_required def batch_delete(): - replies = g.source.replies + replies = Reply.query.filter(Reply.source_id == g.source.id) \ + .filter(Reply.deleted_by_source == False).all() # noqa if len(replies) == 0: current_app.logger.error("Found no replies when at least one was " "expected") return redirect(url_for('.lookup')) for reply in replies: - srm(current_app.storage.path(g.filesystem_id, reply.filename)) - db.session.delete(reply) + reply.deleted_by_source = True + db.session.add(reply) db.session.commit() flash(gettext("All replies have been deleted"), "notification")
diff --git a/securedrop/tests/migrations/migration_e0a525cbab83.py b/securedrop/tests/migrations/migration_e0a525cbab83.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_e0a525cbab83.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- + +import random +import string +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import (random_bool, random_bytes, random_chars, random_datetime, + random_username, bool_or_none) + +random.seed('ᕕ( ᐛ )ᕗ') + + +def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'uuid': str(uuid.uuid4()), + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (uuid, filesystem_id, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:uuid, :filesystem_id, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + +def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + +class UpgradeTester(): + + '''This migration verifies that the deleted_by_source column now exists, + and that the data migration completed successfully. 
+ ''' + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size) + VALUES (:journalist_id, :source_id, :filename, :size) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + replies = db.engine.execute( + text('SELECT * FROM replies')).fetchall() + assert len(replies) == self.JOURNO_NUM - 1 + + for reply in replies: + assert reply.deleted_by_source == False # noqa + + +class DowngradeTester(): + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size, deleted_by_source) + VALUES (:journalist_id, :source_id, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the deleted_by_source column is now gone, and + otherwise the table has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM replies" + replies = db.engine.execute(text(sql)).fetchall() + + for reply in replies: + try: + # This should produce an exception, as the column (should) + # be gone. 
+ assert reply['deleted_by_source'] is None + except NoSuchColumnError: + pass + + assert len(replies) == self.JOURNO_NUM - 1 diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -441,11 +441,6 @@ def helper_test_reply(self, test_reply, expected_success=True): self.assertEqual(resp.status_code, 200) self.assertIn("Reply deleted", resp.data) - # Make sure the reply is deleted from the filesystem - utils.async.wait_for_assertion( - lambda: self.assertFalse(os.path.exists( - current_app.storage.path(filesystem_id, msgid)))) - app.get('/logout') @patch('source_app.main.async_genkey') diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -14,7 +14,7 @@ import version from db import db -from models import Source +from models import Source, Reply from source_app import main as source_app_main from utils.db_helper import new_codename from utils.instrument import InstrumentedApp @@ -394,6 +394,7 @@ def test_delete_all_successfully_deletes_replies(source_app): with source_app.app_context(): journalist, _ = utils.db_helper.init_journalist() source, codename = utils.db_helper.init_source() + source_id = source.id utils.db_helper.reply(journalist, source, 1) with source_app.test_client() as app: @@ -406,8 +407,41 @@ def test_delete_all_successfully_deletes_replies(source_app): text = resp.data.decode('utf-8') assert "All replies have been deleted" in text + with source_app.app_context(): + source = Source.query.get(source_id) + replies = Reply.query.filter(Reply.source_id == source_id).all() + for reply in replies: + assert reply.deleted_by_source is True + + +def test_delete_all_replies_deleted_by_source_but_not_journalist(source_app): + """Replies can be deleted by a source, but not by journalists. As such, + replies may still exist in the replies table, but no longer be visible.""" + with source_app.app_context(): + journalist, _ = utils.db_helper.init_journalist() + source, codename = utils.db_helper.init_source() + utils.db_helper.reply(journalist, source, 1) + replies = Reply.query.filter(Reply.source_id == source.id).all() + for reply in replies: + reply.deleted_by_source = True + db.session.add(reply) + db.session.commit() + + with source_app.test_client() as app: + with patch.object(source_app.logger, 'error') as logger: + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.post(url_for('main.batch_delete'), + follow_redirects=True) + assert resp.status_code == 200 + logger.assert_called_once_with( + "Found no replies when at least one was expected" + ) + -def test_delete_all_replies_already_deleted(source_app): +def test_delete_all_replies_already_deleted_by_journalists(source_app): with source_app.app_context(): journalist, _ = utils.db_helper.init_journalist() source, codename = utils.db_helper.init_source()
[reply refactor] Preserve conversation history between sources and journalists

In order for journalists to view the conversation history between sources and journalists, when a source deletes a journalist reply from the _source inbox_, it should not delete the reply from the _journalist's_ inbox. This is a major UX win for journalists while preserving the current behavior of minimizing the data available if a source codename is compromised.

We could do this by adding a column to the replies table, e.g. `allow_source_read` (`default=True`) or something similar, that we can filter on when showing replies to sources. When the source deletes the reply from their inbox, we set this to `False`.

Epic: #3097
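A minimal sketch of the soft-delete approach (the merged diff above ended up naming the column `deleted_by_source` rather than the proposed `allow_source_read`): the source's inbox query filters out soft-deleted replies, and source-initiated deletion flips the flag instead of removing the row and shredding the file. (`Reply`, `db`, and Flask's `g` come from the source app's existing imports.)

```
# Source inbox: hide replies the source has deleted, but keep the rows
# (and files) so journalists retain the conversation history.
source_inbox = Reply.query.filter(Reply.source_id == g.source.id) \
    .filter(Reply.deleted_by_source == False).all()  # noqa

# Deleting from the source's inbox becomes a flag flip, not a file wipe.
reply.deleted_by_source = True
db.session.add(reply)
db.session.commit()
```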
In #3097 it was noted that the message in the source interface ("[Deleting replies] also lets us know that you are aware of our reply") is inaccurate. If we have the `allow_source_read` column, we could potentially also use it to indicate to the journalist that a reply has been read and acknowledged by the source.

At the risk of complicating things, I think there's a case to do so, since a savvy journalist who used the deletion status of replies to infer source activity would no longer be able to do so after this change. I suggest we add a follow-up issue for that, where we could discuss the best way to address the UX (both for the web interface and the graphical client). If we don't get to that before 0.9.0, we can remove the inaccurate sentence from the source interface for now.
2018-08-03T01:41:37Z
[]
[]
freedomofpress/securedrop
3,700
freedomofpress__securedrop-3700
[ "3676" ]
65f7b3fb7164d91eb8aeac63abeb51ac30fb0b69
diff --git a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py --- a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py +++ b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py @@ -30,11 +30,11 @@ def upgrade(): sources = conn.execute(sa.text("SELECT * FROM sources_tmp")).fetchall() for source in sources: - id = source.id - source_uuid = str(uuid.uuid4()) conn.execute( - sa.text("UPDATE sources_tmp SET uuid=('{}') WHERE id={}".format( - source_uuid, id))) + sa.text("""UPDATE sources_tmp SET uuid=:source_uuid WHERE + id=:id""").bindparams(source_uuid=str(uuid.uuid4()), + id=source.id) + ) # Now create new table with unique constraint applied. op.create_table(quoted_name('sources', quote=False), diff --git a/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py @@ -0,0 +1,70 @@ +"""add reply UUID + +Revision ID: 6db892e17271 +Revises: e0a525cbab83 +Create Date: 2018-08-06 20:31:50.035066 + +""" +from alembic import op +import sqlalchemy as sa + +import uuid + +# revision identifiers, used by Alembic. +revision = '6db892e17271' +down_revision = 'e0a525cbab83' +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table('replies', 'replies_tmp') + + # Add new column. + op.add_column('replies_tmp', sa.Column('uuid', sa.String(length=36))) + + # Populate new column in replies_tmp table. + conn = op.get_bind() + replies = conn.execute( + sa.text("SELECT * FROM replies_tmp")).fetchall() + + for reply in replies: + conn.execute( + sa.text("""UPDATE replies_tmp SET uuid=:reply_uuid WHERE + id=:id""").bindparams(reply_uuid=str(uuid.uuid4()), + id=reply.id) + ) + + # Now create new table with constraints applied to UUID column. + op.create_table('replies', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('journalist_id', sa.Integer(), nullable=True), + sa.Column('source_id', sa.Integer(), nullable=True), + sa.Column('filename', sa.String(length=255), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('deleted_by_source', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), + sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid'), + ) + + # Data Migration: move all replies into the new table. + conn.execute(''' + INSERT INTO replies + SELECT id, uuid, journalist_id, source_id, filename, size, + deleted_by_source + FROM replies_tmp + ''') + + # Now delete the old table. 
+ op.drop_table('replies_tmp') + + +def downgrade(): + with op.batch_alter_table('replies', schema=None) as batch_op: + batch_op.drop_column('uuid') + + # ### end Alembic commands ### diff --git a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py --- a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py +++ b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py @@ -30,11 +30,10 @@ def upgrade(): sa.text("SELECT * FROM replies_tmp")).fetchall() for reply in replies: - id = reply.id conn.execute( - sa.text("""UPDATE replies_tmp - SET deleted_by_source=('0') - WHERE id={}""".format(id))) + sa.text("""UPDATE replies_tmp SET deleted_by_source=0 WHERE + id=:id""").bindparams(id=reply.id) + ) # Now create new table with not null constraint applied to # deleted_by_source. diff --git a/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py @@ -0,0 +1,74 @@ +"""add UUID column for users table + +Revision ID: f2833ac34bb6 +Revises: 6db892e17271 +Create Date: 2018-08-13 18:10:19.914274 + +""" +from alembic import op +import sqlalchemy as sa +import uuid + + +# revision identifiers, used by Alembic. +revision = 'f2833ac34bb6' +down_revision = '6db892e17271' +branch_labels = None +depends_on = None + + +def upgrade(): + # Save existing journalist table. + op.rename_table('journalists', 'journalists_tmp') + + # Add UUID column. + op.add_column('journalists_tmp', sa.Column('uuid', sa.String(length=36))) + + # Add UUIDs to journalists_tmp table. + conn = op.get_bind() + journalists = conn.execute( + sa.text("SELECT * FROM journalists_tmp")).fetchall() + + for journalist in journalists: + conn.execute( + sa.text("""UPDATE journalists_tmp SET uuid=:journalist_uuid WHERE + id=:id""").bindparams(journalist_uuid=str(uuid.uuid4()), + id=journalist.id) + ) + + # Now create new table with unique constraint applied. + op.create_table('journalists', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('pw_salt', sa.Binary(), nullable=True), + sa.Column('pw_hash', sa.Binary(), nullable=True), + sa.Column('passphrase_hash', sa.String(length=256), nullable=True), + sa.Column('is_admin', sa.Boolean(), nullable=True), + sa.Column('otp_secret', sa.String(length=16), nullable=True), + sa.Column('is_totp', sa.Boolean(), nullable=True), + sa.Column('hotp_counter', sa.Integer(), nullable=True), + sa.Column('last_token', sa.String(length=6), nullable=True), + sa.Column('created_on', sa.DateTime(), nullable=True), + sa.Column('last_access', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username'), + sa.UniqueConstraint('uuid') + ) + + conn = op.get_bind() + conn.execute(''' + INSERT INTO journalists + SELECT id, uuid, username, pw_salt, pw_hash, passphrase_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access + FROM journalists_tmp + ''') + + # Now delete the old table. 
+ op.drop_table('journalists_tmp') + + +def downgrade(): + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.drop_column('uuid') diff --git a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py --- a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py +++ b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py @@ -30,12 +30,11 @@ def upgrade(): sa.text("SELECT * FROM submissions_tmp")).fetchall() for submission in submissions: - id = submission.id - submission_uuid = str(uuid.uuid4()) conn.execute( - sa.text("""UPDATE submissions_tmp - SET uuid=('{}') - WHERE id={}""".format(submission_uuid, id))) + sa.text("""UPDATE submissions_tmp SET uuid=:submission_uuid WHERE + id=:id""").bindparams(submission_uuid=str(uuid.uuid4()), + id=submission.id) + ) # Now create new table with unique constraint applied. op.create_table('submissions', diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -9,7 +9,7 @@ import journalist_app from sdconfig import config from db import db -from models import Journalist, Source, Submission +from models import Journalist, Reply, Source, Submission def add_test_user(username, password, otp_secret, is_admin=False): @@ -33,7 +33,7 @@ def add_test_user(username, password, otp_secret, is_admin=False): context.pop() -def create_source_and_submissions(num_submissions=2): +def create_source_and_submissions(num_submissions=2, num_replies=2): app = journalist_app.create_app(config) with app.app_context(): @@ -63,10 +63,26 @@ def create_source_and_submissions(num_submissions=2): submission = Submission(source, fpath) db.session.add(submission) + # Generate some test replies + for _ in range(num_replies): + source.interaction_count += 1 + fname = "{}-{}-reply.gpg".format(source.interaction_count, + source.journalist_filename) + app.crypto_util.encrypt( + str(os.urandom(1)), + [app.crypto_util.getkey(source.filesystem_id), + config.JOURNALIST_KEY], + app.storage.path(source.filesystem_id, fname)) + + journalist = Journalist.query.first() + reply = Reply(journalist, source, fname) + db.session.add(reply) + db.session.commit() - print("Test source '{}' added with {} submissions".format( - journalist_designation, num_submissions) - ) + + print("Test source '{}' added with {} submissions " + "and {} replies".format(journalist_designation, num_submissions, + num_replies)) if __name__ == "__main__": # pragma: no cover diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -1,10 +1,9 @@ from datetime import datetime, timedelta from functools import wraps -import hashlib import json from werkzeug.exceptions import default_exceptions # type: ignore -from flask import abort, Blueprint, current_app, jsonify, request, send_file +from flask import abort, Blueprint, current_app, jsonify, request from db import db from journalist_app import utils @@ -62,6 +61,7 @@ def get_endpoints(): endpoints = {'sources_url': '/api/v1/sources', 'current_user_url': '/api/v1/user', 'submissions_url': '/api/v1/submissions', + 'replies_url': '/api/v1/replies', 'auth_token_url': '/api/v1/token'} return jsonify(endpoints), 200 @@ -176,22 +176,23 @@ def download_submission(source_uuid, submission_uuid): submission.downloaded = True db.session.commit() 
- response = send_file(current_app.storage.path(source.filesystem_id, - submission.filename), - mimetype="application/pgp-encrypted", - as_attachment=True, - add_etags=False) # Disable Flask default ETag + return utils.serve_file_with_etag(source, submission.filename) - response.direct_passthrough = False - response.headers['Etag'] = '"sha256:{}"'.format( - hashlib.sha256(response.get_data()).hexdigest()) - return response + @api.route('/sources/<source_uuid>/replies/<reply_uuid>/download', + methods=['GET']) + @token_required + def download_reply(source_uuid, reply_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) + + return utils.serve_file_with_etag(source, reply.filename) @api.route('/sources/<source_uuid>/submissions/<submission_uuid>', methods=['GET', 'DELETE']) @token_required def single_submission(source_uuid, submission_uuid): if request.method == 'GET': + source = get_or_404(Source, source_uuid, column=Source.uuid) submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) return jsonify(submission.to_json()), 200 @@ -203,40 +204,60 @@ def single_submission(source_uuid, submission_uuid): submission) return jsonify({'message': 'Submission deleted'}), 200 - @api.route('/sources/<source_uuid>/reply', methods=['POST']) + @api.route('/sources/<source_uuid>/replies', methods=['GET', 'POST']) @token_required - def post_reply(source_uuid): - source = get_or_404(Source, source_uuid, - column=Source.uuid) - if request.json is None: - abort(400, 'please send requests in valid JSON') - - if 'reply' not in request.json: - abort(400, 'reply not found in request body') - - user = get_user_object(request) - - data = json.loads(request.data) - if not data['reply']: - abort(400, 'reply should not be empty') - - source.interaction_count += 1 - try: - filename = current_app.storage.save_pre_encrypted_reply( - source.filesystem_id, - source.interaction_count, - source.journalist_filename, - data['reply']) - except NotEncrypted: + def all_source_replies(source_uuid): + if request.method == 'GET': + source = get_or_404(Source, source_uuid, column=Source.uuid) return jsonify( - {'message': 'You must encrypt replies client side'}), 400 + {'replies': [reply.to_json() for + reply in source.replies]}), 200 + elif request.method == 'POST': + source = get_or_404(Source, source_uuid, + column=Source.uuid) + if request.json is None: + abort(400, 'please send requests in valid JSON') + + if 'reply' not in request.json: + abort(400, 'reply not found in request body') + + user = get_user_object(request) + + data = json.loads(request.data) + if not data['reply']: + abort(400, 'reply should not be empty') + + source.interaction_count += 1 + try: + filename = current_app.storage.save_pre_encrypted_reply( + source.filesystem_id, + source.interaction_count, + source.journalist_filename, + data['reply']) + except NotEncrypted: + return jsonify( + {'message': 'You must encrypt replies client side'}), 400 + + reply = Reply(user, source, + current_app.storage.path(source.filesystem_id, + filename)) + db.session.add(reply) + db.session.add(source) + db.session.commit() + return jsonify({'message': 'Your reply has been stored'}), 201 - reply = Reply(user, source, - current_app.storage.path(source.filesystem_id, filename)) - db.session.add(reply) - db.session.add(source) - db.session.commit() - return jsonify({'message': 'Your reply has been stored'}), 201 + @api.route('/sources/<source_uuid>/replies/<reply_uuid>', + 
methods=['GET', 'DELETE']) + @token_required + def single_reply(source_uuid, reply_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) + if request.method == 'GET': + return jsonify(reply.to_json()), 200 + elif request.method == 'DELETE': + utils.delete_file(source.filesystem_id, reply.filename, + reply) + return jsonify({'message': 'Reply deleted'}), 200 @api.route('/submissions', methods=['GET']) @token_required @@ -245,6 +266,13 @@ def get_all_submissions(): return jsonify({'submissions': [submission.to_json() for submission in submissions]}), 200 + @api.route('/replies', methods=['GET']) + @token_required + def get_all_replies(): + replies = Reply.query.all() + return jsonify( + {'replies': [reply.to_json() for reply in replies]}), 200 + @api.route('/user', methods=['GET']) @token_required def get_current_user(): diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -4,6 +4,7 @@ from flask import (g, flash, current_app, abort, send_file, redirect, url_for, render_template, Markup) from flask_babel import gettext, ngettext +import hashlib from sqlalchemy.sql.expression import false import i18n @@ -323,3 +324,16 @@ def col_download_all(cols_selected): submissions += Submission.query.filter( Submission.source_id == id).all() return download("all", submissions) + + +def serve_file_with_etag(source, filename): + response = send_file(current_app.storage.path(source.filesystem_id, + filename), + mimetype="application/pgp-encrypted", + as_attachment=True, + add_etags=False) # Disable Flask default ETag + + response.direct_passthrough = False + response.headers['Etag'] = '"sha256:{}"'.format( + hashlib.sha256(response.get_data()).hexdigest()) + return response diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -150,7 +150,8 @@ def to_json(self): 'add_star_url': url_for('api.add_star', source_uuid=self.uuid), 'remove_star_url': url_for('api.remove_star', source_uuid=self.uuid), - 'reply_url': url_for('api.post_reply', source_uuid=self.uuid) + 'replies_url': url_for('api.all_source_replies', + source_uuid=self.uuid) } return json_source @@ -200,6 +201,7 @@ def to_json(self): class Reply(db.Model): __tablename__ = "replies" id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) journalist_id = Column(Integer, ForeignKey('journalists.id')) journalist = relationship( @@ -222,6 +224,7 @@ class Reply(db.Model): def __init__(self, journalist, source, filename): self.journalist_id = journalist.id self.source_id = source.id + self.uuid = str(uuid.uuid4()) self.filename = filename self.size = os.stat(current_app.storage.path(source.filesystem_id, filename)).st_size @@ -229,6 +232,22 @@ def __init__(self, journalist, source, filename): def __repr__(self): return '<Reply %r>' % (self.filename) + def to_json(self): + json_submission = { + 'source_url': url_for('api.single_source', + source_uuid=self.source.uuid), + 'reply_url': url_for('api.single_reply', + source_uuid=self.source.uuid, + reply_uuid=self.uuid), + 'filename': self.filename, + 'size': self.size, + 'journalist_username': self.journalist.username, + 'journalist_uuid': self.journalist.uuid, + 'uuid': self.uuid, + 'is_deleted_by_source': self.deleted_by_source, + } + return json_submission + class SourceStar(db.Model): __tablename__ = 'source_stars' @@ 
-300,6 +319,7 @@ class NonDicewarePassword(PasswordError): class Journalist(db.Model): __tablename__ = "journalists" id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) username = Column(String(255), nullable=False, unique=True) pw_salt = Column(Binary(32)) pw_hash = Column(Binary(256)) @@ -324,6 +344,7 @@ def __init__(self, username, password, is_admin=False, otp_secret=None): self.username = username self.set_password(password) self.is_admin = is_admin + self.uuid = str(uuid.uuid4()) if otp_secret: self.set_hotp_secret(otp_secret) @@ -552,7 +573,8 @@ def to_json(self): json_user = { 'username': self.username, 'last_login': self.last_access.isoformat() + 'Z', - 'is_admin': self.is_admin + 'is_admin': self.is_admin, + 'uuid': self.uuid } return json_user
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -181,6 +181,20 @@ def test_submissions(journalist_app): 'submissions': source.submissions} [email protected](scope='function') +def test_files(journalist_app, test_journo): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + utils.db_helper.reply(test_journo['journalist'], source, 1) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions, + 'replies': source.replies} + + @pytest.fixture(scope='function') def journalist_api_token(journalist_app, test_journo): with journalist_app.test_client() as app: diff --git a/securedrop/tests/migrations/migration_6db892e17271.py b/securedrop/tests/migrations/migration_6db892e17271.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_6db892e17271.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- + +import random +import string +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import (random_bool, random_bytes, random_chars, random_datetime, + random_username, bool_or_none) + +random.seed('ᕕ( ᐛ )ᕗ') + + +def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'uuid': str(uuid.uuid4()), + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (uuid, filesystem_id, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:uuid, :filesystem_id, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + +def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + +class UpgradeTester(): + + '''This migration verifies that the deleted_by_source column now exists, + and that the data migration completed successfully. 
+ ''' + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size, deleted_by_source) + VALUES (:journalist_id, :source_id, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + replies = db.engine.execute( + text('SELECT * FROM replies')).fetchall() + assert len(replies) == self.JOURNO_NUM - 1 + + for reply in replies: + assert reply.uuid is not None + + +class DowngradeTester(): + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'uuid': str(uuid.uuid4()), + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, source_id, uuid, filename, + size, deleted_by_source) + VALUES (:journalist_id, :source_id, :uuid, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the deleted_by_source column is now gone, and + otherwise the table has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM replies" + replies = db.engine.execute(text(sql)).fetchall() + + for reply in replies: + try: + # This should produce an exception, as the column (should) + # be gone. + assert reply['uuid'] is None + except NoSuchColumnError: + pass + + assert len(replies) == self.JOURNO_NUM - 1 diff --git a/securedrop/tests/migrations/migration_f2833ac34bb6.py b/securedrop/tests/migrations/migration_f2833ac34bb6.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_f2833ac34bb6.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- + +import random +import string +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import (random_bool, random_bytes, random_chars, random_datetime, + random_username, bool_or_none) + +random.seed('ᕕ( ᐛ )ᕗ') + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. 
+ ''' + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + journalists = db.engine.execute( + text('SELECT * FROM journalists')).fetchall() + + for journalist in journalists: + assert journalist.uuid is not None + + +class DowngradeTester(): + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'uuid': str(uuid.uuid4()), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, uuid, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :uuid, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is now gone, but otherwise the table + has the expected number of rows. 
+ ''' + with self.app.app_context(): + sql = "SELECT * FROM journalists" + journalists = db.engine.execute(text(sql)).fetchall() + + for journalist in journalists: + try: + # This should produce an exception, as the column (should) + # be gone. + assert journalist['uuid'] is None + except NoSuchColumnError: + pass diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -21,7 +21,8 @@ def test_unauthenticated_user_gets_all_endpoints(journalist_app): observed_endpoints = json.loads(response.data) expected_endpoints = [u'current_user_url', u'submissions_url', - u'sources_url', u'auth_token_url'] + u'sources_url', u'auth_token_url', + u'replies_url'] assert expected_endpoints == observed_endpoints.keys() @@ -135,18 +136,22 @@ def test_authorized_user_gets_all_sources(journalist_app, test_submissions, def test_user_without_token_cannot_get_protected_endpoints(journalist_app, - test_submissions): + test_files): with journalist_app.app_context(): - uuid = test_submissions['source'].uuid + uuid = test_files['source'].uuid protected_routes = [ url_for('api.get_all_sources'), url_for('api.single_source', source_uuid=uuid), url_for('api.all_source_submissions', source_uuid=uuid), url_for('api.single_submission', source_uuid=uuid, - submission_uuid=test_submissions['submissions'][0].uuid), + submission_uuid=test_files['submissions'][0].uuid), url_for('api.download_submission', source_uuid=uuid, - submission_uuid=test_submissions['submissions'][0].uuid), + submission_uuid=test_files['submissions'][0].uuid), url_for('api.get_all_submissions'), + url_for('api.get_all_replies'), + url_for('api.single_reply', source_uuid=uuid, + reply_uuid=test_files['replies'][0].uuid), + url_for('api.all_source_replies', source_uuid=uuid), url_for('api.get_current_user') ] @@ -220,7 +225,7 @@ def test_user_without_token_cannot_post_protected_endpoints(journalist_app, with journalist_app.app_context(): uuid = test_source['source'].uuid protected_routes = [ - url_for('api.post_reply', source_uuid=uuid), + url_for('api.all_source_replies', source_uuid=uuid), url_for('api.add_star', source_uuid=uuid), url_for('api.flag', source_uuid=uuid) ] @@ -410,6 +415,70 @@ def test_authorized_user_can_get_single_submission(journalist_app, test_submissions['source'].submissions[0].size +def test_authorized_user_can_get_all_replies(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_replies'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + json_response = json.loads(response.data) + + observed_replies = [reply['filename'] for + reply in json_response['replies']] + + expected_replies = [reply.filename for + reply in Reply.query.all()] + assert observed_replies == expected_replies + + +def test_authorized_user_get_source_replies(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_files['source'].uuid + response = app.get(url_for('api.all_source_replies', + source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + json_response = json.loads(response.data) + + observed_replies = [reply['filename'] for + reply in json_response['replies']] + + expected_replies = [reply.filename for + reply in test_files['source'].replies] + assert observed_replies == expected_replies + + +def 
test_authorized_user_can_get_single_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + response = app.get(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + json_response = json.loads(response.data) + + reply = Reply.query.filter(Reply.uuid == reply_uuid).one() + + assert json_response['uuid'] == reply_uuid + assert json_response['journalist_username'] == \ + reply.journalist.username + assert json_response['journalist_uuid'] == \ + reply.journalist.uuid + assert json_response['is_deleted_by_source'] is False + assert json_response['filename'] == \ + test_files['source'].replies[0].filename + assert json_response['size'] == \ + test_files['source'].replies[0].size + + def test_authorized_user_can_delete_single_submission(journalist_app, test_submissions, journalist_api_token): @@ -428,6 +497,22 @@ def test_authorized_user_can_delete_single_submission(journalist_app, Submission.uuid == submission_uuid).all() == [] +def test_authorized_user_can_delete_single_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + response = app.delete(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Reply should now be gone. + assert Reply.query.filter(Reply.uuid == reply_uuid).all() == [] + + def test_authorized_user_can_delete_source_collection(journalist_app, test_source, journalist_api_token): @@ -469,6 +554,27 @@ def test_authorized_user_can_download_submission(journalist_app, hashlib.sha256(response.data).hexdigest()) +def test_authorized_user_can_download_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + + response = app.get(url_for('api.download_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Response should be a PGP encrypted download + assert response.mimetype == 'application/pgp-encrypted' + + # Response should have Etag field with hash + assert response.headers['ETag'] == '"sha256:{}"'.format( + hashlib.sha256(response.data).hexdigest()) + + def test_authorized_user_can_get_current_user_endpoint(journalist_app, test_journo, journalist_api_token): @@ -480,6 +586,7 @@ def test_authorized_user_can_get_current_user_endpoint(journalist_app, json_response = json.loads(response.data) assert json_response['is_admin'] is False assert json_response['username'] == test_journo['username'] + assert json_response['uuid'] == test_journo['journalist'].uuid def test_request_with_missing_auth_header_triggers_403(journalist_app): @@ -508,7 +615,8 @@ def test_unencrypted_replies_get_rejected(journalist_app, journalist_api_token, with journalist_app.test_client() as app: uuid = test_source['source'].uuid reply_content = 'This is a plaintext reply' - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data=json.dumps({'reply': reply_content}), headers=get_api_headers(journalist_api_token)) 
assert response.status_code == 400 @@ -527,7 +635,8 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, reply_content = current_app.crypto_util.gpg.encrypt( 'This is a plaintext reply', source_key).data - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data=json.dumps({'reply': reply_content}), headers=get_api_headers(journalist_api_token)) assert response.status_code == 201 @@ -557,7 +666,8 @@ def test_reply_without_content_400(journalist_app, journalist_api_token, test_source, test_journo): with journalist_app.test_client() as app: uuid = test_source['source'].uuid - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data=json.dumps({'reply': ''}), headers=get_api_headers(journalist_api_token)) assert response.status_code == 400 @@ -567,7 +677,8 @@ def test_reply_without_reply_field_400(journalist_app, journalist_api_token, test_source, test_journo): with journalist_app.test_client() as app: uuid = test_source['source'].uuid - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data=json.dumps({'other': 'stuff'}), headers=get_api_headers(journalist_api_token)) assert response.status_code == 400 @@ -577,7 +688,8 @@ def test_reply_without_json_400(journalist_app, journalist_api_token, test_source, test_journo): with journalist_app.test_client() as app: uuid = test_source['source'].uuid - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data='invalid', headers=get_api_headers(journalist_api_token)) assert response.status_code == 400 @@ -587,7 +699,8 @@ def test_reply_with_valid_curly_json_400(journalist_app, journalist_api_token, test_source, test_journo): with journalist_app.test_client() as app: uuid = test_source['source'].uuid - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data='{}', headers=get_api_headers(journalist_api_token)) assert response.status_code == 400 @@ -600,7 +713,8 @@ def test_reply_with_valid_square_json_400(journalist_app, journalist_api_token, test_source, test_journo): with journalist_app.test_client() as app: uuid = test_source['source'].uuid - response = app.post(url_for('api.post_reply', source_uuid=uuid), + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), data='[]', headers=get_api_headers(journalist_api_token)) assert response.status_code == 400 @@ -616,7 +730,7 @@ def test_malformed_json_400(journalist_app, journalist_api_token, test_journo, uuid = test_source['source'].uuid protected_routes = [ url_for('api.get_token'), - url_for('api.post_reply', source_uuid=uuid), + url_for('api.all_source_replies', source_uuid=uuid), url_for('api.add_star', source_uuid=uuid), url_for('api.flag', source_uuid=uuid), ] @@ -639,7 +753,7 @@ def test_empty_json_400(journalist_app, journalist_api_token, test_journo, uuid = test_source['source'].uuid protected_routes = [ url_for('api.get_token'), - url_for('api.post_reply', source_uuid=uuid), + url_for('api.all_source_replies', source_uuid=uuid), ] with journalist_app.test_client() as app: for protected_route in protected_routes:
[reply refactor] Add journalist interface API endpoints for replies

We should add (at least) two new API endpoints for the client to use: `GET /api/v1/replies` and `GET /api/v1/sources/<source_uuid>/replies`, so that journalists can list replies across all sources and then download replies per source.

Epic: #3097
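For concreteness, here is a minimal client-side sketch of how the proposed endpoints might be consumed. The port, token header, and JSON field names are assumptions for illustration (modeled on the rest of the journalist API), and the per-reply download route shown is likewise assumed, not part of this proposal:

```python
# Hypothetical client sketch, not the actual SecureDrop client code.
import requests

API = "http://localhost:8081/api/v1"  # assumed dev address
headers = {"Authorization": "Token <journalist-api-token>",
           "Accept": "application/json"}

# Replies across all sources.
all_replies = requests.get(API + "/replies", headers=headers).json()

# Replies for a single source, then a per-reply download.
source_uuid = "<source-uuid>"  # e.g. taken from GET /api/v1/sources
replies = requests.get(
    API + "/sources/{}/replies".format(source_uuid), headers=headers).json()
for reply in replies.get("replies", []):
    resp = requests.get(
        API + "/sources/{}/replies/{}/download".format(
            source_uuid, reply["uuid"]), headers=headers)
    with open(reply["filename"], "wb") as f:
        f.write(resp.content)  # payload stays PGP-encrypted; decrypt client side
```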
2018-08-07T00:23:31Z
[]
[]
freedomofpress/securedrop
3,703
freedomofpress__securedrop-3703
[ "3205" ]
4ae97c3436886c2f04923ee63e5979beabc1b655
diff --git a/testinfra/combine-junit.py b/devops/scripts/combine-junit.py similarity index 81% rename from testinfra/combine-junit.py rename to devops/scripts/combine-junit.py --- a/testinfra/combine-junit.py +++ b/devops/scripts/combine-junit.py @@ -36,7 +36,11 @@ def merge_results(xml_files): cases = [] for file_name in xml_files: - tree = ET.parse(file_name) + # We disable bandit checking to permit B314, which recommends use + # of defusedxml to protect against malicious XML files. This code + # path only runs in CI, not on developer workstations, and the XML + # output is generated by testinfra on staging machines. + tree = ET.parse(file_name) # nosec test_suite = tree.getroot() failures += int(test_suite.attrib['failures']) tests += int(test_suite.attrib['tests']) diff --git a/molecule/testinfra/staging/conftest.py b/molecule/testinfra/staging/conftest.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/conftest.py @@ -0,0 +1,91 @@ +""" +Configuration for TestInfra test suite for SecureDrop. +Handles importing host-specific test vars, so test functions +can be reused across multiple hosts, with varied targets. + +Vars should be placed in `testinfra/vars/<hostname>.yml`. +""" + +import io +import os +import yaml + + +# The config tests target staging by default. It's possible to override +# for e.g. prod, but the associated vars files are not yet ported. +target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging') + + +def securedrop_import_testinfra_vars(hostname, with_header=False): + """ + Import vars from a YAML file to populate tests with host-specific + values used in checks. For instance, the SecureDrop docroot will + be under /vagrant in development, but /var/www/securedrop in staging. + + Vars must be stored in `testinfra/vars/<hostname>.yml`. + """ + filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml") + with io.open(filepath, 'r') as f: + hostvars = yaml.safe_load(f) + + if with_header: + hostvars = dict(securedrop_test_vars=hostvars) + + if os.environ.get("FPF_CI", False): + export_ci_var_overrides() + + return hostvars + + +def export_ci_var_overrides(): + """ + In CI environments, the hardcoded local IP addresses aren't valid + (since we're testing against remote AWS hosts). Detect the CI env, + and look up the proper IPs for use in the testinfra tests. + Expose those IPs as environment variables, so the tests can use them. + """ + molecule_info = lookup_molecule_info() + app_ip = lookup_aws_private_address(molecule_info, 'app-staging') + mon_ip = lookup_aws_private_address(molecule_info, 'mon-staging') + + os.environ['APP_IP'] = app_ip + os.environ['MON_IP'] = mon_ip + + # Make SSH calls more resilient, as we're operating against remote hosts, + # and running from CI. We've observed flakey connections in CI at times. + os.environ['ANSIBLE_SSH_RETRIES'] = '5' + ssh_args = [ + "-o ConnectTimeout=60s", + "-o ControlMaster=auto", + "-o ControlPersist=180s", + "-o StrictHostKeyChecking=no", + ] + os.environ['ANSIBLE_SSH_ARGS'] = " ".join(ssh_args) + + +def lookup_aws_private_address(molecule_info, hostname): + """ + Inspect Molecule instance config dict (imported from YAML file), + and return the attribute for the requested hostname. + """ + + host_info = list(filter(lambda x: x['instance'] == hostname, + molecule_info))[0] + host_ip = host_info['priv_address'] + return host_ip + + +def lookup_molecule_info(): + """ + Molecule automatically writes YAML files documenting dynamic host info + such as remote IPs. 
Read that file and pass back the config dict. + """ + molecule_instance_config_path = os.path.abspath( + os.environ['MOLECULE_INSTANCE_CONFIG']) + with open(molecule_instance_config_path, 'r') as f: + molecule_instance_config = yaml.safe_load(f) + return molecule_instance_config + + +def pytest_namespace(): + return securedrop_import_testinfra_vars(target_host, with_header=True) diff --git a/testinfra/conftest.py b/testinfra/conftest.py deleted file mode 100644 --- a/testinfra/conftest.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -Configuration for TestInfra test suite for SecureDrop. -Handles importing host-specific test vars, so test functions -can be reused across multiple hosts, with varied targets. - -Vars should be placed in `testinfra/vars/<hostname>.yml`. -""" - -import io -import os -import yaml - - -target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] -assert target_host != "" - - -def securedrop_import_testinfra_vars(hostname, with_header=False): - """ - Import vars from a YAML file to populate tests with host-specific - values used in checks. For instance, the SecureDrop docroot will - be under /vagrant in development, but /var/www/securedrop in staging. - - Vars must be stored in `testinfra/vars/<hostname>.yml`. - """ - filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml") - with io.open(filepath, 'r') as f: - hostvars = yaml.safe_load(f) - - if with_header: - hostvars = dict(securedrop_test_vars=hostvars) - return hostvars - - -def pytest_namespace(): - return securedrop_import_testinfra_vars(target_host, with_header=True)
diff --git a/molecule/aws/tests/test_tor_interfaces.py b/molecule/aws/tests/test_tor_interfaces.py --- a/molecule/aws/tests/test_tor_interfaces.py +++ b/molecule/aws/tests/test_tor_interfaces.py @@ -21,7 +21,7 @@ def test_www(host, site): os.path.dirname(__file__), "../../../install_files/ansible-base/{}".format(site['file']) ) - onion_url_raw = io.open(onion_url_filepath, 'ro').read() + onion_url_raw = io.open(onion_url_filepath, 'r').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. diff --git a/testinfra/app-code/test_haveged.py b/molecule/testinfra/staging/app-code/test_haveged.py similarity index 96% rename from testinfra/app-code/test_haveged.py rename to molecule/testinfra/staging/app-code/test_haveged.py --- a/testinfra/app-code/test_haveged.py +++ b/molecule/testinfra/staging/app-code/test_haveged.py @@ -1,3 +1,6 @@ +testinfra_hosts = ["app-staging"] + + def test_haveged_config(File): """ Ensure haveged's low entrop watermark is sufficiently high. diff --git a/testinfra/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py similarity index 97% rename from testinfra/app-code/test_redis_worker.py rename to molecule/testinfra/staging/app-code/test_redis_worker.py --- a/testinfra/app-code/test_redis_worker.py +++ b/molecule/testinfra/staging/app-code/test_redis_worker.py @@ -2,6 +2,7 @@ import re +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py similarity index 97% rename from testinfra/app-code/test_securedrop_app_code.py rename to molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/testinfra/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -1,9 +1,8 @@ -import os import pytest +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] def test_apache_default_docroot_is_absent(File): diff --git a/testinfra/app-code/test_xvfb.py b/molecule/testinfra/staging/app-code/test_xvfb.py similarity index 99% rename from testinfra/app-code/test_xvfb.py rename to molecule/testinfra/staging/app-code/test_xvfb.py --- a/testinfra/app-code/test_xvfb.py +++ b/molecule/testinfra/staging/app-code/test_xvfb.py @@ -1,3 +1,6 @@ +testinfra_hosts = ["app-staging"] + + def test_xvfb_is_installed(Package): """ Ensure apt requirements for Xvfb are present. diff --git a/testinfra/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py similarity index 99% rename from testinfra/app/apache/test_apache_journalist_interface.py rename to molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/testinfra/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -2,6 +2,7 @@ import re +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars # Setting once so it can be reused in multiple tests. 
diff --git a/testinfra/app/apache/test_apache_service.py b/molecule/testinfra/staging/app/apache/test_apache_service.py similarity index 98% rename from testinfra/app/apache/test_apache_service.py rename to molecule/testinfra/staging/app/apache/test_apache_service.py --- a/testinfra/app/apache/test_apache_service.py +++ b/molecule/testinfra/staging/app/apache/test_apache_service.py @@ -1,6 +1,7 @@ import pytest +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/app/apache/test_apache_source_interface.py b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py similarity index 98% rename from testinfra/app/apache/test_apache_source_interface.py rename to molecule/testinfra/staging/app/apache/test_apache_source_interface.py --- a/testinfra/app/apache/test_apache_source_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py @@ -1,7 +1,7 @@ import pytest import re - +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py similarity index 99% rename from testinfra/app/apache/test_apache_system_config.py rename to molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/testinfra/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -1,7 +1,7 @@ import pytest import re - +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/app/test_network.py b/molecule/testinfra/staging/app/test_app_network.py similarity index 98% rename from testinfra/app/test_network.py rename to molecule/testinfra/staging/app/test_app_network.py --- a/testinfra/app/test_network.py +++ b/molecule/testinfra/staging/app/test_app_network.py @@ -5,6 +5,7 @@ from jinja2 import Template +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/app/test_apparmor.py b/molecule/testinfra/staging/app/test_apparmor.py similarity index 99% rename from testinfra/app/test_apparmor.py rename to molecule/testinfra/staging/app/test_apparmor.py --- a/testinfra/app/test_apparmor.py +++ b/molecule/testinfra/staging/app/test_apparmor.py @@ -1,6 +1,7 @@ import pytest +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars diff --git a/testinfra/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py similarity index 98% rename from testinfra/app/test_appenv.py rename to molecule/testinfra/staging/app/test_appenv.py --- a/testinfra/app/test_appenv.py +++ b/molecule/testinfra/staging/app/test_appenv.py @@ -1,5 +1,6 @@ import pytest +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars diff --git a/testinfra/app/test_ossec.py b/molecule/testinfra/staging/app/test_ossec_agent.py similarity index 100% rename from testinfra/app/test_ossec.py rename to molecule/testinfra/staging/app/test_ossec_agent.py diff --git a/testinfra/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py similarity index 98% rename from testinfra/app/test_tor_config.py rename to molecule/testinfra/staging/app/test_tor_config.py --- a/testinfra/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -1,5 +1,6 @@ import pytest +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars diff --git 
a/testinfra/app/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py similarity index 98% rename from testinfra/app/test_tor_hidden_services.py rename to molecule/testinfra/staging/app/test_tor_hidden_services.py --- a/testinfra/app/test_tor_hidden_services.py +++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py @@ -2,6 +2,7 @@ import re +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars diff --git a/testinfra/common/test_cron_apt.py b/molecule/testinfra/staging/common/test_cron_apt.py similarity index 100% rename from testinfra/common/test_cron_apt.py rename to molecule/testinfra/staging/common/test_cron_apt.py diff --git a/testinfra/common/test_fpf_apt_repo.py b/molecule/testinfra/staging/common/test_fpf_apt_repo.py similarity index 100% rename from testinfra/common/test_fpf_apt_repo.py rename to molecule/testinfra/staging/common/test_fpf_apt_repo.py diff --git a/testinfra/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py similarity index 100% rename from testinfra/common/test_grsecurity.py rename to molecule/testinfra/staging/common/test_grsecurity.py diff --git a/testinfra/common/test_ip6tables.py b/molecule/testinfra/staging/common/test_ip6tables.py similarity index 100% rename from testinfra/common/test_ip6tables.py rename to molecule/testinfra/staging/common/test_ip6tables.py diff --git a/testinfra/common/test_platform.py b/molecule/testinfra/staging/common/test_platform.py similarity index 100% rename from testinfra/common/test_platform.py rename to molecule/testinfra/staging/common/test_platform.py diff --git a/testinfra/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py similarity index 83% rename from testinfra/common/test_system_hardening.py rename to molecule/testinfra/staging/common/test_system_hardening.py --- a/testinfra/common/test_system_hardening.py +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -1,9 +1,6 @@ -import os import pytest import re -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - @pytest.mark.parametrize('sysctl_opt', [ ('net.ipv4.conf.all.accept_redirects', 0), @@ -63,19 +60,21 @@ def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module): assert f.contains("^blacklist {}$".format(kernel_module)) [email protected](hostenv.startswith('mon'), - reason="Monitor Server does not have swap disabled yet.") def test_swap_disabled(Command): """ Ensure swap space is disabled. Prohibit writing memory to swapfiles to reduce the threat of forensic analysis leaking any sensitive info. """ - c = Command.check_output('swapon --summary') - # A leading slash will indicate full path to a swapfile. - assert not re.search("^/", c, re.M) - # Expect that ONLY the headers will be present in the output. - rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority") - assert re.search(rgx, c) + hostname = Command.check_output('hostname') + + # Mon doesn't have swap disabled yet + if not hostname.startswith('mon'): + c = Command.check_output('swapon --summary') + # A leading slash will indicate full path to a swapfile. + assert not re.search("^/", c, re.M) + # Expect that ONLY the headers will be present in the output. 
+ rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority") + assert re.search(rgx, c) def test_twofactor_disabled_on_tty(host): diff --git a/testinfra/common/test_tor_mirror.py b/molecule/testinfra/staging/common/test_tor_mirror.py similarity index 100% rename from testinfra/common/test_tor_mirror.py rename to molecule/testinfra/staging/common/test_tor_mirror.py diff --git a/testinfra/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py similarity index 98% rename from testinfra/common/test_user_config.py rename to molecule/testinfra/staging/common/test_user_config.py --- a/testinfra/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -1,8 +1,6 @@ import os import re -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - def test_sudoers_config(File, Sudo): """ diff --git a/testinfra/mon/test_network.py b/molecule/testinfra/staging/mon/test_mon_network.py similarity index 98% rename from testinfra/mon/test_network.py rename to molecule/testinfra/staging/mon/test_mon_network.py --- a/testinfra/mon/test_network.py +++ b/molecule/testinfra/staging/mon/test_mon_network.py @@ -5,6 +5,7 @@ from jinja2 import Template +testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/mon/test_ossec_ruleset.py b/molecule/testinfra/staging/mon/test_ossec_ruleset.py similarity index 96% rename from testinfra/mon/test_ossec_ruleset.py rename to molecule/testinfra/staging/mon/test_ossec_ruleset.py --- a/testinfra/mon/test_ossec_ruleset.py +++ b/molecule/testinfra/staging/mon/test_ossec_ruleset.py @@ -1,7 +1,7 @@ import pytest import re - +testinfra_hosts = ["mon-staging"] alert_level_regex = re.compile(r"Level: '(\d+)'") rule_id_regex = re.compile(r"Rule id: '(\d+)'") sdvars = pytest.securedrop_test_vars diff --git a/testinfra/mon/test_ossec.py b/molecule/testinfra/staging/mon/test_ossec_server.py similarity index 98% rename from testinfra/mon/test_ossec.py rename to molecule/testinfra/staging/mon/test_ossec_server.py --- a/testinfra/mon/test_ossec.py +++ b/molecule/testinfra/staging/mon/test_ossec_server.py @@ -2,6 +2,7 @@ import pytest +testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/mon/test_postfix.py b/molecule/testinfra/staging/mon/test_postfix.py similarity index 98% rename from testinfra/mon/test_postfix.py rename to molecule/testinfra/staging/mon/test_postfix.py --- a/testinfra/mon/test_postfix.py +++ b/molecule/testinfra/staging/mon/test_postfix.py @@ -2,6 +2,7 @@ import pytest +testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars diff --git a/testinfra/ossec/test_journalist_mail.py b/molecule/testinfra/staging/ossec/test_journalist_mail.py similarity index 91% rename from testinfra/ossec/test_journalist_mail.py rename to molecule/testinfra/staging/ossec/test_journalist_mail.py --- a/testinfra/ossec/test_journalist_mail.py +++ b/molecule/testinfra/staging/ossec/test_journalist_mail.py @@ -1,8 +1,14 @@ import pytest +import os import testinfra import time +# DRY declaration of why we're skipping all these tests. 
+# For details, see https://github.com/freedomofpress/securedrop/issues/3689 +SKIP_REASON = "unimplemented, see GH#3689" + + class TestBase(object): @pytest.fixture(autouse=True) @@ -62,14 +68,18 @@ def service_stopped(self, host, name): class TestJournalistMail(TestBase): + @pytest.mark.skip(reason=SKIP_REASON) def test_procmail(self, host): self.service_started(host, "postfix") for (destination, f) in ( ('journalist', 'alert-journalist-one.txt'), ('journalist', 'alert-journalist-two.txt'), ('ossec', 'alert-ossec.txt')): + # Look up CWD, in case tests move in the future + current_dir = os.path.dirname(os.path.abspath(__file__)) self.ansible(host, "copy", - "dest=/tmp/{f} src=testinfra/ossec/{f}".format(f=f)) + "dest=/tmp/{f} src={d}/{f}".format(f=f, + d=current_dir)) assert self.run(host, "/var/ossec/process_submissions_today.sh forget") assert self.run(host, "postsuper -d ALL") @@ -82,6 +92,7 @@ def test_procmail(self, host): destination=destination)) self.service_stopped(host, "postfix") + @pytest.mark.skip(reason=SKIP_REASON) def test_process_submissions_today(self, host): assert self.run(host, "/var/ossec/process_submissions_today.sh " @@ -90,9 +101,11 @@ def test_process_submissions_today(self, host): "/var/ossec/process_submissions_today.sh " "test_modified_in_the_past_24h") + @pytest.mark.skip(reason=SKIP_REASON) def test_send_encrypted_alert(self, host): self.service_started(host, "postfix") - src = "install_files/ansible-base/roles/ossec/files/test_admin_key.sec" + src = ("../../install_files/ansible-base/roles/ossec/files/" + "test_admin_key.sec") self.ansible(host, "copy", "dest=/tmp/test_admin_key.sec src={src}".format(src=src)) @@ -144,6 +157,7 @@ def trigger(who, payload): assert self.run(host, "mv /usr/bin/gpg.save /usr/bin/gpg") self.service_stopped(host, "postfix") + @pytest.mark.skip(reason=SKIP_REASON) def test_missing_journalist_alert(self, host): # # missing journalist mail does nothing @@ -158,6 +172,7 @@ def test_missing_journalist_alert(self, host): """) # https://ossec-docs.readthedocs.io/en/latest/manual/rules-decoders/testing.html + @pytest.mark.skip(reason=SKIP_REASON) def test_ossec_rule_journalist(self, host): assert self.run(host, """ set -ex @@ -166,6 +181,7 @@ def test_ossec_rule_journalist(self, host): echo "$l" | /var/ossec/bin/ossec-logtest -U '400600:1:ossec' """) + @pytest.mark.skip(reason=SKIP_REASON) def test_journalist_mail_notification(self, host): mon = host app = testinfra.host.Host.get_host( diff --git a/testinfra/ansible/test_validate_users.py b/testinfra/ansible/test_validate_users.py deleted file mode 100644 --- a/testinfra/ansible/test_validate_users.py +++ /dev/null @@ -1,19 +0,0 @@ -import pytest -import os - - [email protected](reason="Validation not fully implemented yet") [email protected]('username', [ - 'root', - 'amnesia', -]) -def test_validate_users(LocalCommand, username): - """ - Check that Ansible halts execution of the playbook if the Admin - username is set to any disallowed value. - """ - var_override = "--tags validate --extra-vars ssh_users={}".format(username) - os.environ['ANSIBLE_ARGS'] = var_override - c = LocalCommand("vagrant provision /staging/") - - assert c.rc != 0 diff --git a/testinfra/test.py b/testinfra/test.py deleted file mode 100755 --- a/testinfra/test.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python -""" -Wrapper script for running Testinfra against SecureDrop VMs. -Accepts a single argument: the hostname to run the tests against. 
-Script will handle building the list of tests to run, based on hostname. -""" -import os -import subprocess -import sys -import tempfile - -target_host = sys.argv[1] - -# Set env var so that `testinfra/conftest.py` can read in a YAML vars file -# specific to the host being tested. -os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = target_host - - -def get_target_roles(target_host): - """ - Assemble list of role tests to run. Hard-coded per host. - """ - target_roles = {"app-staging": ['testinfra/app', - 'testinfra/app-code', - 'testinfra/common'], - "staging": ['testinfra/ossec'], - "mon-staging": ['testinfra/mon', - 'testinfra/common'], - "mon-prod": ['testinfra/mon']} - - try: - return target_roles[target_host] - except KeyError: - print("Unknown host '{}'! Exiting.".format(target_host)) - sys.exit(1) - - -def run_testinfra(target_host, verbose=True): - """ - Handler for executing testinfra against `target_host`. - Queries list of roles via helper def `get_target_roles`. - """ - conn_type = "ssh" - target_roles = get_target_roles(target_host) - if verbose: - # Print informative output prior to test run. - print("Running Testinfra suite against '{}'...".format(target_host)) - print("Target roles:") - for role in target_roles: - print(" - {}".format(role)) - - # Prod hosts host have SSH access over Tor. Let's use the SSH backend - # for Testinfra, rather than Ansible. When we write a dynamic inventory - # script for Ansible SSH-over-Tor, we can use the Ansible backend - # everywhere. - if target_host.endswith("-prod"): - os.environ['SECUREDROP_SSH_OVER_TOR'] = '1' - # Dump SSH config to tempfile so it can be passed as arg to testinfra. - ssh_config_output = subprocess.check_output(["vagrant", "ssh-config", - target_host]) - # Create temporary file to store ssh-config. Not deleting it - # automatically because there's no sensitive info (HidServAuth is - # required to connect), and we'll need it outside of the - # context-manager block that writes to it. 
- ssh_config_tmpfile = tempfile.NamedTemporaryFile(delete=False) - with ssh_config_tmpfile.file as f: - f.write(ssh_config_output) - ssh_config_path = ssh_config_tmpfile.name - testinfra_command_template = """ -testinfra \ - -vv \ - -n auto \ - --connection ssh \ - --ssh-config \ - {ssh_config_path}\ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - elif os.environ.get("FPF_CI", 'false') == 'true': - ssh_config_path = os.environ["CI_SSH_CONFIG"] - testinfra_command_template = """ -testinfra \ - -vv \ - -n 8 \ - --connection {connection_type} \ - --ssh-config \ - {ssh_config_path}\ - --junit-xml=./{target_host}-results.xml\ - --junit-prefix={target_host}\ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - elif target_host == 'staging': - if "CI_SSH_CONFIG" in os.environ: - ssh_config_path = '--ssh-config ' + os.environ["CI_SSH_CONFIG"] - inventory = "" - junit = """ - --junit-xml=./{target_host}-results.xml \ - --junit-prefix={target_host} \ - """ - else: - ssh_config_path = "" - inventory = """ - --ansible-inventory \ - .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory \ - """ - junit = "" - testinfra_command_template = """ -testinfra \ - -vv \ - --connection ansible \ - {testinfra_args} \ - {ssh_config_path} \ - {inventory} \ - {junit} \ - --hosts app-staging,mon-staging \ - {target_roles} -""".format(ssh_config_path=ssh_config_path, - inventory=inventory, - target_roles=" ".join(target_roles), - junit=junit, - testinfra_args=os.environ.get('TESTINFRA_ARGS', '')) - - else: - ssh_config_path = "" - testinfra_command_template = """ -testinfra \ - -vv \ - -n auto \ - --connection ansible \ - --ansible-inventory \ - .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory \ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - testinfra_command = testinfra_command_template.format( - target_host=target_host, - ssh_config_path=ssh_config_path, - connection_type=conn_type, - target_roles=" ".join(target_roles), - ).split() - - # Execute config tests. - subprocess.check_call(testinfra_command) - - -if __name__ == "__main__": - run_testinfra(target_host)
Suppress OSSEC alerts asking SecureDrop administrators to upgrade to Xenial

Per the original report in #1530, administrators may under some circumstances receive OSSEC alerts asking them to upgrade their SecureDrop servers to Ubuntu Xenial. Doing so would be *a very bad idea* as Ubuntu Xenial is not in fact supported yet (see #3204). We should therefore investigate whether administrators are still receiving such alerts under any circumstances.

Tasks:

- [x] Verify whether these alerts are still sent
- [x] If appropriate, suppress them
I can confirm that these alerts are still being sent to at least a couple of admins. One such admin received this within the past 24 hours:
```
Subject: Cron root@mon test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.weekly )

/etc/cron.weekly/update-notifier-common: New release '16.04.4 LTS' available. Run 'do-release-upgrade' to upgrade to it.
```
I confirm receipt of this alert as well.
We've agreed to tackle this as a stretch goal for the 0.8.0 release (i.e. ensuring a robust release is the top priority, but if early QA results are positive, we may attempt a fix). If it doesn't make it, we'll include a note in the release announcement warning admins to ignore this alert.
In prod/staging VMs one won't get this alert by default. Note that I did see it on hardware - every week. To enable it on VMs, in `/etc/update-manager/release-upgrades` change `Prompt=never` to `Prompt=lts` for parity with hardware (thanks to @conorsch for pointing me to this file). Now `/etc/cron.weekly/update-notifier-common` should report to the user that `do-release-upgrade` is needed (if you still don't get the alert, you should remove the stamp file stored at `/var/lib/ubuntu-release-upgrader/release-upgrade-available`).
For testing one can just temporarily run the (previously weekly) cronjob more often to get the alert e.g. every 5 minutes:
```
*/5 * * * * root test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.weekly)
```
But since the message in question here isn't being parsed by OSSEC, I don't think adding a local rule will suppress it - instead, the simplest resolution is probably just to take the same approach as in our vagrant VMs: in `/etc/update-manager/release-upgrades` replace `Prompt=lts` with `Prompt=never`. We could do this in `postinst` of the SecureDrop OSSEC packages.
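To make the suggested fix above concrete, here is a minimal sketch of the suppression step, written as a standalone Python snippet rather than as actual packaging code. Only the two paths come from the comments above; the rest is illustrative and would live in (or be invoked from) the package's `postinst` in practice:

```python
# Illustrative sketch: stop update-manager from advertising release
# upgrades, and remove the stamp file that feeds the weekly cron notice.
import os
import re

RELEASE_UPGRADES = "/etc/update-manager/release-upgrades"
STAMP = "/var/lib/ubuntu-release-upgrader/release-upgrade-available"

with open(RELEASE_UPGRADES) as f:
    config = f.read()

# Force Prompt=never so do-release-upgrade is never suggested.
with open(RELEASE_UPGRADES, "w") as f:
    f.write(re.sub(r"(?m)^Prompt=.*$", "Prompt=never", config))

# Clear any pending "new release available" notification.
if os.path.exists(STAMP):
    os.remove(STAMP)
```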
2018-08-08T16:58:04Z
[]
[]
freedomofpress/securedrop
3,709
freedomofpress__securedrop-3709
[ "3708" ]
885ccfa2f75109015634bd9f488b1946cbcb2d4b
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -78,7 +78,7 @@ def create_source_and_submissions(num_submissions=2): parser = argparse.ArgumentParser() parser.add_argument("--staging", help="Adding user for staging tests.", - action="store_true") + action="store_true") args = parser.parse_args() add_test_user("journalist", test_password,
diff --git a/securedrop/tests/pages-layout/functional_test.py b/securedrop/tests/pages-layout/functional_test.py --- a/securedrop/tests/pages-layout/functional_test.py +++ b/securedrop/tests/pages-layout/functional_test.py @@ -44,7 +44,8 @@ def webdriver_fixture(self, request): if not os.path.exists(self.log_dir): os.makedirs(self.log_dir) self.new_profile = webdriver.FirefoxProfile() - self.new_profile.set_preference("intl.accept_languages", self.accept_languages) + self.new_profile.set_preference("intl.accept_languages", + self.accept_languages) self.override_driver = True yield None
[functional testing] Fix staging CI job on tbb-0.9.0 We removed the application/functional test run from the staging environment in #3697. We should also update the testinfra test references and remove the application test run from CI; otherwise we get a few testinfra test failures due to pip deps, and an error when we attempt to run the application tests in CI:
```
TASK [Run application tests] ***************************************************
Friday 10 August 2018  19:28:17 +0000 (0:00:00.037)       0:01:08.223 *********
fatal: [app-staging]: FAILED! => {"changed": true, "msg": "non-zero return code", "rc": 127, "stderr": "Shared connection to 52.36.194.59 closed.\r\n", "stdout": "/home/sdrop/.ansible/tmp/ansible-tmp-1533929297.62-93522333058246/app-tests.sh: line 13: pytest: command not found\r\n", "stdout_lines": ["/home/sdrop/.ansible/tmp/ansible-tmp-1533929297.62-93522333058246/app-tests.sh: line 13: pytest: command not found"]}
...ignoring
```
2018-08-15T18:20:57Z
[]
[]
freedomofpress/securedrop
3,724
freedomofpress__securedrop-3724
[ "3701" ]
e59232d14c185def7a651e5753824c835536ac6f
diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py --- a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py +++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py @@ -14,7 +14,7 @@ ossec_version: description: - version number of release to download - default: "2.8.2" + default: "3.0.0" required: no notes: - The OSSEC version to download is hardcoded to avoid surprises. @@ -23,15 +23,15 @@ ''' EXAMPLES = ''' - ossec_urls: - ossec_version: "2.8.2" + ossec_version: "3.0.0" ''' -import re # noqa: E402 +import re # noqa: F401 HAS_REQUESTS = True try: - import requests + import requests # noqa: F401 except ImportError: HAS_REQUESTS = False @@ -39,60 +39,38 @@ class OSSECURLs(): def __init__(self, ossec_version): + self.REPO_URL = "https://github.com/ossec/ossec-hids" self.ossec_version = ossec_version - - checksums = self.parse_checksums() - self.ansible_facts = dict( ossec_version=self.ossec_version, ossec_tarball_filename=self.ossec_tarball_filename, ossec_tarball_url=self.ossec_tarball_url, - ossec_checksum_filename=self.ossec_checksum_filename, - ossec_checksum_url=self.ossec_checksum_url, + ossec_signature_filename=self.ossec_signature_filename, + ossec_signature_url=self.ossec_signature_url, ) - self.ansible_facts.update(checksums) - @property def ossec_tarball_filename(self): return "ossec-hids-{}.tar.gz".format(self.ossec_version) @property def ossec_tarball_url(self): - return "https://github.com/ossec/ossec-hids/archive/{}.tar.gz".format( - self.ossec_version) + return self.REPO_URL + "/archive/{}.tar.gz".format(self.ossec_version) @property - def ossec_checksum_url(self): - return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format( # noqa: E501 - self.ossec_version, self.ossec_checksum_filename) + def ossec_signature_url(self): + return self.REPO_URL + "/releases/download/{}/{}".format( + self.ossec_version, self.ossec_signature_filename) @property - def ossec_checksum_filename(self): - return "{}-checksum.txt".format(self.ossec_tarball_filename) - - def parse_checksums(self): - r = requests.get(self.ossec_checksum_url) - checksum_regex = re.compile(r''' - ^MD5\( - ''' - + re.escape(self.ossec_tarball_filename) + - r'''\)=\s+(?P<ossec_md5_checksum>[0-9a-f]{32})\s+ - SHA1\( - ''' - + re.escape(self.ossec_tarball_filename) + - r'''\)=\s+(?P<ossec_sha1_checksum>[0-9a-f]{40})$ - ''', re.VERBOSE | re.MULTILINE - ) - checksum_list = r.content.rstrip() - results = re.match(checksum_regex, checksum_list).groupdict() - return results + def ossec_signature_filename(self): + return "ossec-hids-{}.tar.gz.asc".format(self.ossec_version) def main(): module = AnsibleModule( # noqa: F405 argument_spec=dict( - ossec_version=dict(default="2.8.2"), + ossec_version=dict(default="3.0.0"), ), supports_check_mode=False )
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder/tests/vars.yml @@ -1,6 +1,6 @@ --- securedrop_version: "0.10.0~rc1" -ossec_version: "2.8.2" +ossec_version: "3.0.0" keyring_version: "0.1.2" config_version: "0.1.1" grsec_version: "4.4.144"
Update OSSEC to v3.0
## Description
OSSEC 3.0 was released on July 17th, 2018 [0], containing a large number of bug fixes (including 2 security fixes) as well as major new functionality. Of note, it supports whitelisting syscheck md5 hashes in a sqlite database, potentially reducing notification noise.
## User Research Evidence
Users like up-to-date packages.
## User Stories
As a SecureDrop administrator, I would like to have all packages updated and would like to minimize alerts/noise.
[0] https://github.com/ossec/ossec-hids/releases
2018-08-21T21:08:41Z
[]
[]
freedomofpress/securedrop
3,748
freedomofpress__securedrop-3748
[ "3579" ]
0c6e184763be0813555f4abf15c0566f49dce952
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -36,6 +36,7 @@ import prompt_toolkit from prompt_toolkit.validation import Validator, ValidationError import yaml +from pkg_resources import parse_version sdlog = logging.getLogger(__name__) RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77' @@ -628,6 +629,9 @@ def check_for_updates(args): # Do not check out any release candidate tags all_prod_tags = [x for x in all_tags if 'rc' not in x] + # We want the tags to be sorted based on semver + all_prod_tags.sort(key=parse_version) + latest_tag = all_prod_tags[-1] if current_tag != latest_tag:
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -66,6 +66,20 @@ def test_check_for_updates_update_needed(self, tmpdir, caplog): assert update_status is True assert tag == '0.6.1' + def test_check_for_updates_higher_version(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = "0.6" + tags_available = "0.1\n0.10.0\n0.6.2\n0.6\n0.6-rc1\n0.9.0\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "Update needed" in caplog.text + assert update_status is True + assert tag == '0.10.0' + def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog): """Regression test for #3426""" git_repo_path = str(tmpdir)
securedrop-admin check_for_updates is showing update needed when it should not
## Description
./securedrop-admin check_for_updates is showing update needed when it should not.
## Steps to Reproduce
```
amnesia@amnesia:~/Persistent/securedrop$ git checkout develop
Switched to branch 'develop'
Your branch is up-to-date with 'origin/develop'.
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.10.0
amnesia@amnesia:~/Persistent/securedrop$ git checkout 0.8.0-rc3
Note: checking out '0.8.0-rc3'.

You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
state without impacting any branches by performing another checkout.

If you want to create a new branch to retain commits you create, you may
do so (now or later) by using -b with the checkout command again. Example:

  git checkout -b <new-branch-name>

HEAD is now at 20289641... SecureDrop 0.8.0~rc3
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.8.0-rc3
amnesia@amnesia:~/Persistent/securedrop$ git checkout 0.10.0
Previous HEAD position was 20289641... SecureDrop 0.8.0~rc3
HEAD is now at 4720be05... Merge pull request #3570 from dachary/wip-i18n
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.10.0
amnesia@amnesia:~/Persistent/securedrop$
```
## Expected Behavior
Should not say update required.
## Actual Behavior
Says update needed.
## Comments
Suggestions to fix, any other relevant information.
This impacts 0.10.0 and later:
```
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.9.0
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: All updates applied
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.9.0
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.10.0
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.10.0
```
because the output of `git tag` (which is the subprocess call done in the update functionality) is not sorted by version, only lexicographically:
```
0.1
0.10.0
0.2
0.2.1
0.3
0.3.1
0.3.10
0.3.11
0.3.12
0.3.2
0.3.3
0.3.4
0.3.5
0.3.6
0.3.7
0.3.7.post1
0.3.8
0.3.9
0.3pre
0.4
0.4.1
0.4.2
0.4.3
0.4.4
0.5
0.5.1
0.5.2
0.6
0.6-rc1
0.6-rc2
0.6-rc3
0.6-rc4
0.6-rc5
0.7.0
0.7.0-rc1
0.7.0-rc2
0.7.0-rc3
0.7.0-rc4
0.7.0-rc5
0.8-rc1
0.8.0
0.8.0-rc1
0.8.0-rc2
0.8.0-rc3
0.9.0
```
Ideally: We don't fix this and instead implement #3502
Since we did not make progress on #3502 in this release cycle, this bug must be fixed in 0.9.0
Implementing https://github.com/freedomofpress/securedrop/issues/3502 will take time and is not an easy task to implement and QA a week before the release. For now I would suggest sorting the versions using the `semver` sorting logic of `setuptools`, so we will not need to install anything new. I will submit a PR on this.
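A minimal sketch of the suggested fix, mirroring what the patch above does: sort the tag list with `pkg_resources.parse_version` so the comparison is version-aware rather than lexicographic. The sample tags are taken from the output above:
```python
from pkg_resources import parse_version

tags = ["0.1", "0.10.0", "0.6", "0.6.2", "0.9.0"]

# Plain string sorting puts "0.10.0" before "0.6" and wrongly picks "0.9.0".
assert sorted(tags)[-1] == "0.9.0"

# Version-aware sorting picks the real latest tag.
tags.sort(key=parse_version)
assert tags[-1] == "0.10.0"
```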
2018-08-28T12:34:18Z
[]
[]
freedomofpress/securedrop
3,750
freedomofpress__securedrop-3750
[ "3579" ]
f461e9fff997519b152c0a9569ee67b959763d66
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -36,6 +36,7 @@ import prompt_toolkit from prompt_toolkit.validation import Validator, ValidationError import yaml +from pkg_resources import parse_version sdlog = logging.getLogger(__name__) RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77' @@ -628,6 +629,9 @@ def check_for_updates(args): # Do not check out any release candidate tags all_prod_tags = [x for x in all_tags if 'rc' not in x] + # We want the tags to be sorted based on semver + all_prod_tags.sort(key=parse_version) + latest_tag = all_prod_tags[-1] if current_tag != latest_tag:
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -66,6 +66,20 @@ def test_check_for_updates_update_needed(self, tmpdir, caplog): assert update_status is True assert tag == '0.6.1' + def test_check_for_updates_higher_version(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = "0.6" + tags_available = "0.1\n0.10.0\n0.6.2\n0.6\n0.6-rc1\n0.9.0\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "Update needed" in caplog.text + assert update_status is True + assert tag == '0.10.0' + def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog): """Regression test for #3426""" git_repo_path = str(tmpdir)
securedrop-admin check_for_updates is showing update needed when it should not
## Description
./securedrop-admin check_for_updates is showing update needed when it should not.
## Steps to Reproduce
```
amnesia@amnesia:~/Persistent/securedrop$ git checkout develop
Switched to branch 'develop'
Your branch is up-to-date with 'origin/develop'.
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.10.0
amnesia@amnesia:~/Persistent/securedrop$ git checkout 0.8.0-rc3
Note: checking out '0.8.0-rc3'.

You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
state without impacting any branches by performing another checkout.

If you want to create a new branch to retain commits you create, you may
do so (now or later) by using -b with the checkout command again. Example:

  git checkout -b <new-branch-name>

HEAD is now at 20289641... SecureDrop 0.8.0~rc3
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.8.0-rc3
amnesia@amnesia:~/Persistent/securedrop$ git checkout 0.10.0
Previous HEAD position was 20289641... SecureDrop 0.8.0~rc3
HEAD is now at 4720be05... Merge pull request #3570 from dachary/wip-i18n
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.10.0
amnesia@amnesia:~/Persistent/securedrop$
```
## Expected Behavior
Should not say update required.
## Actual Behavior
Says update needed.
## Comments
Suggestions to fix, any other relevant information.
This impacts 0.10.0 and later:
```
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.9.0
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: All updates applied
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.9.0
amnesia@amnesia:~/Persistent/securedrop$ git tag -a 0.10.0
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin check_for_updates
INFO: Checking for SecureDrop updates...
Fetching origin
INFO: Update needed
amnesia@amnesia:~/Persistent/securedrop$ git describe
0.10.0
```
because the output of `git tag` (which is the subprocess call done in the update functionality) is not sorted by version, only lexicographically:
```
0.1
0.10.0
0.2
0.2.1
0.3
0.3.1
0.3.10
0.3.11
0.3.12
0.3.2
0.3.3
0.3.4
0.3.5
0.3.6
0.3.7
0.3.7.post1
0.3.8
0.3.9
0.3pre
0.4
0.4.1
0.4.2
0.4.3
0.4.4
0.5
0.5.1
0.5.2
0.6
0.6-rc1
0.6-rc2
0.6-rc3
0.6-rc4
0.6-rc5
0.7.0
0.7.0-rc1
0.7.0-rc2
0.7.0-rc3
0.7.0-rc4
0.7.0-rc5
0.8-rc1
0.8.0
0.8.0-rc1
0.8.0-rc2
0.8.0-rc3
0.9.0
```
Ideally: We don't fix this and instead implement #3502
Since we did not make progress on #3502 in this release cycle, this bug must be fixed in 0.9.0
Implementing https://github.com/freedomofpress/securedrop/issues/3502 will take time and is not an easy task to implement and QA a week before the release. For now I would suggest sorting the versions using the `semver` sorting logic of `setuptools`, so we will not need to install anything new. I will submit a PR on this.
2018-08-28T19:51:07Z
[]
[]
freedomofpress/securedrop
3,756
freedomofpress__securedrop-3756
[ "3754" ]
68cbfeaf5ad25e37007757a663b36f6902e4fd35
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.9.0~rc1' +version = '0.9.0~rc2' # The full version, including alpha/beta/rc tags. -release = '0.9.0~rc1' +release = '0.9.0~rc2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.9.0~rc1' +__version__ = '0.9.0~rc2'
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder/tests/vars.yml @@ -1,5 +1,5 @@ --- -securedrop_version: "0.9.0~rc1" +securedrop_version: "0.9.0~rc2" ossec_version: "2.8.2" keyring_version: "0.1.2" config_version: "0.1.1"
Builder image needs updates
## Description
The sd-builder image needs to be updated.
## Steps to Reproduce
`make build-debs` and observe the error.
## Expected Behavior
`make build-debs` should exit without error.
## Actual Behavior
`make build-debs` returns an error, as security updates are needed for the container.
## Comments
Instructions are available here: https://docs.securedrop.org/en/latest/development/dockerbuildmaint.html
2018-08-30T15:04:53Z
[]
[]
freedomofpress/securedrop
3,767
freedomofpress__securedrop-3767
[ "3766" ]
d123bac529ca3018eedd133965095ab68e3cfbe8
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.9.0~rc2' +version = '0.9.0~rc3' # The full version, including alpha/beta/rc tags. -release = '0.9.0~rc2' +release = '0.9.0~rc3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.9.0~rc2' +__version__ = '0.9.0~rc3'
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder/tests/vars.yml @@ -1,5 +1,5 @@ --- -securedrop_version: "0.9.0~rc2" +securedrop_version: "0.9.0~rc3" ossec_version: "2.8.2" keyring_version: "0.1.2" config_version: "0.1.1"
Update builder hash
## Status
Ready for review
## Description of Changes
Fixes #3765, updates the securedrop builder image with the latest packages.
## Testing
`make build-debs` should complete without error.
## Deployment
Dev only
2018-08-31T18:57:21Z
[]
[]
freedomofpress/securedrop
3,794
freedomofpress__securedrop-3794
[ "3727" ]
b2d7874b62fdbdd93fc505569b9597628ec12dfb
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.9.0~rc1' +version = '0.10.0~rc1' # The full version, including alpha/beta/rc tags. -release = '0.9.0~rc1' +release = '0.10.0~rc1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.9.0~rc1' +__version__ = '0.10.0~rc1'
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder/tests/vars.yml @@ -1,5 +1,5 @@ --- -securedrop_version: "0.9.0~rc1" +securedrop_version: "0.10.0~rc1" ossec_version: "2.8.2" keyring_version: "0.1.2" config_version: "0.1.1"
Release SecureDrop 0.9.0 This is a tracking issue for the upcoming release of SecureDrop 0.9.0 - tasks may get added or modified. **Feature freeze:** August 22, 2018 **String freeze:** August 29, 2018 **Pre-release announcement:** August 29, 2018 **Release date:** September 5, 2018 _SecureDrop maintainers and testers:_ As you QA 0.9.0, please report back your testing results as comments on this ticket. File GitHub issues for any problems found, tag them "QA: Release", and associate them with the 0.9 milestone for tracking. Test debian packages will be posted on https://apt-test.freedom.press signed with [the test key](https://gist.githubusercontent.com/conorsch/ec4008b111bc3142fca522693f3cce7e/raw/2968621e8ad92db4505a31fcc5776422d7d26729/apt-test%2520apt%2520pubkey). An Ansible playbook testing the upgrade path is [here](https://gist.github.com/conorsch/e7556624df59b2a0f8b81f7c0c4f9b7d). # Prepare release candidate (0.9.0~rc2) - [x] Update `securedrop-keyring` package (#3723) - [x] Prepare 0.9.0-rc2 release changelog - @emkll - [x] Prepare test plan for 0.9.0~rc2 - @emkll and @redshiftzero - [x] Create pull request for 0.9.0~rc2 into release/0.9 - @emkll - [x] Build debs (including linux-{image,firmware}) and put up `0.9.0~rc2` on test apt server - @emkll # Prepare release candidate (0.9.0~rc1) - [x] Prepare 0.9.0 release changelog - @emkll - [x] Write and send pre-release announcement (#3725 ) - @eloquence - [x] Check for Tor stable release (`0.3.3.9` is currently on apt-test) - [x] Prepare test plan for 0.9.0~rc1 - @emkll and @redshiftzero - [x] Branch `release/0.9` off `develop` - [x] Build debs (including linux-{image,firmware}) and put up `0.9.0~rc1` on test apt server - @emkll # **[QA Matrix for SecureDrop 0.9.0](https://docs.google.com/spreadsheets/d/1hp85kSusSO_nsR0oLLGukr44mGufqSbbP5wi_THqW7U/edit#gid=652784916)** After each test, please update the QA matrix and post details for Basic Server Testing, Application Acceptance Testing and 0.9.0-specific testing below. # Final release - [x] Merge final translations - [x] Push updated signing key to keyservers - [x] Push signed tag - [x] Update tor-apt repo with latest tor - [x] Build final Debian packages for 0.9.0 - [x] Upload Debian packages (including new `securedrop-keyring`, `linux-image`, `linux-firmware`, and `tor` packages) - [x] Pre-Flight: Test install (not upgrade) of 0.9.0 works w/ prod repo debs - [x] Write and send release announcement (#3726 ) - @eloquence - [x] Publish blog post about 0.9.0 Debian package release and instructions for admins # Post release - [x] Merge changelog (i.e. rc commits) back to `develop` - [x] Bump version on `develop` in prep for 0.10.0 release
2018-09-05T23:47:45Z
[]
[]
freedomofpress/securedrop
3,863
freedomofpress__securedrop-3863
[ "3739" ]
6539cf67f79976e695493c0add669b70aab7cdb9
diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py --- a/securedrop/qa_loader.py +++ b/securedrop/qa_loader.py @@ -22,9 +22,6 @@ random.seed('~(=^–^)') # mrow? -JOURNALIST_COUNT = 10 -SOURCE_COUNT = 50 - def random_bool(): return bool(random.getrandbits(1)) @@ -56,128 +53,6 @@ def random_datetime(nullable): ) -def new_journalist(): - # Make a diceware-like password - pw = ' '.join([random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) - for _ in range(7)]) - journalist = Journalist(random_chars(random.randint(3, 32), - nullable=False), - pw, - random_bool()) - if random_bool(): - # to add legacy passwords back in - journalist.passphrase_hash = None - journalist.pw_salt = random_chars(32, nullable=False) - journalist.pw_hash = random_chars(64, nullable=False) - - journalist.is_admin = bool_or_none() - - journalist.is_totp = bool_or_none() - journalist.hotp_counter = (random.randint(-1000, 1000) - if random_bool() else None) - journalist.created_on = random_datetime(nullable=True) - journalist.last_access = random_datetime(nullable=True) - - db.session.add(journalist) - - -def new_source(): - fid_len = random.randint(4, 32) - designation_len = random.randint(4, 32) - source = Source(random_chars(fid_len, nullable=False, - chars=string.ascii_lowercase), - random_chars(designation_len, nullable=False)) - source.flagged = bool_or_none() - source.last_updated = random_datetime(nullable=True) - source.pending = False - - db.session.add(source) - - -def new_submission(config, source_id): - source = Source.query.get(source_id) - - # A source may have a null fid according to the DB, but this will - # break storage.path. - if source.filesystem_id is None: - return - - filename = fake_file(config, source.filesystem_id) - submission = Submission(source, filename) - - # For issue #1189 - if random_bool(): - submission.source_id = None - - submission.downloaded = bool_or_none() - - db.session.add(submission) - - -def fake_file(config, source_fid): - source_dir = path.join(config.STORE_DIR, source_fid) - if not path.exists(source_dir): - os.mkdir(source_dir) - - filename = random_chars(20, nullable=False, chars=string.ascii_lowercase) - num = random.randint(0, 100) - msg_type = 'msg' if random_bool() else 'doc.gz' - filename = '{}-{}-{}.gpg'.format(num, filename, msg_type) - f_len = int(math.floor(random.expovariate(100000) * 1024 * 1024 * 500)) - sub_path = current_app.storage.path(source_fid, filename) - with open(sub_path, 'w') as f: - f.write('x' * f_len) - - return filename - - -def new_source_star(source_id): - source = Source.query.get(source_id) - star = SourceStar(source, bool_or_none()) - db.session.add(star) - - -def new_reply(config, journalist_id, source_id): - source = Source.query.get(source_id) - - # A source may have a null fid according to the DB, but this will - # break storage.path. 
- if source.filesystem_id is None: - return - - journalist = Journalist.query.get(journalist_id) - filename = fake_file(config, source.filesystem_id) - reply = Reply(journalist, source, filename) - db.session.add(reply) - - -def new_journalist_login_attempt(journalist_id): - journalist = Journalist.query.get(journalist_id) - attempt = JournalistLoginAttempt(journalist) - attempt.timestamp = random_datetime(nullable=True) - db.session.add(attempt) - - -def new_abandoned_submission(config, source_id): - '''For issue #1189''' - - source = Source.query.filter(Source.filesystem_id.isnot(None)).all()[0] - filename = fake_file(config, source.filesystem_id) - - # Use this as hack to create a real submission then swap out the source_id - submission = Submission(source, filename) - submission.source_id = source_id - db.session.add(submission) - db.session.commit() - delete_source(source_id) - - -def delete_source(source_id): - '''For issue #1189''' - db.session.execute(text('DELETE FROM sources WHERE id = :source_id'), - {'source_id': source_id}) - - def positive_int(s): i = int(s) if i < 1: @@ -185,40 +60,174 @@ def positive_int(s): return i -def load_data(config, multiplier): - app = create_app(config) - - with app.app_context(): - for _ in range(JOURNALIST_COUNT * multiplier): - new_journalist() +class QaLoader(object): + + JOURNALIST_COUNT = 10 + SOURCE_COUNT = 50 + + def __init__(self, config, multiplier): + self.config = config + self.app = create_app(config) + self.multiplier = multiplier + + self.journalists = [] + self.sources = [] + self.submissions = [] + + def new_journalist(self): + # Make a diceware-like password + pw = ' '.join( + [random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) + for _ in range(7)]) + journalist = Journalist(random_chars(random.randint(3, 32), + nullable=False), + pw, + random_bool()) + if random_bool(): + # to add legacy passwords back in + journalist.passphrase_hash = None + journalist.pw_salt = random_chars(32, nullable=False) + journalist.pw_hash = random_chars(64, nullable=False) + + journalist.is_admin = bool_or_none() + + journalist.is_totp = bool_or_none() + journalist.hotp_counter = (random.randint(-1000, 1000) + if random_bool() else None) + journalist.created_on = random_datetime(nullable=True) + journalist.last_access = random_datetime(nullable=True) + + db.session.add(journalist) + db.session.flush() + self.journalists.append(journalist.id) + + def new_source(self): + fid_len = random.randint(4, 32) + designation_len = random.randint(4, 32) + source = Source(random_chars(fid_len, nullable=False, + chars=string.ascii_lowercase), + random_chars(designation_len, nullable=False)) + source.flagged = bool_or_none() + source.last_updated = random_datetime(nullable=False) + source.pending = False + + db.session.add(source) + db.session.flush() + self.sources.append(source.id) + + def new_submission(self, source_id): + source = Source.query.get(source_id) + + # A source may have a null fid according to the DB, but this will + # break storage.path. 
+ if source.filesystem_id is None: + return + + filename = self.fake_file(source.filesystem_id) + submission = Submission(source, filename) + + # For issue #1189 + if random_bool(): + submission.source_id = None + + submission.downloaded = bool_or_none() + + db.session.add(submission) + db.session.flush() + self.submissions.append(submission.id) + + def fake_file(self, source_fid): + source_dir = path.join(self.config.STORE_DIR, source_fid) + if not path.exists(source_dir): + os.mkdir(source_dir) + + filename = random_chars(20, + nullable=False, + chars=string.ascii_lowercase) + num = random.randint(0, 100) + msg_type = 'msg' if random_bool() else 'doc.gz' + filename = '{}-{}-{}.gpg'.format(num, filename, msg_type) + f_len = int(math.floor(random.expovariate(100000) * 1024 * 1024 * 500)) + sub_path = current_app.storage.path(source_fid, filename) + with open(sub_path, 'w') as f: + f.write('x' * f_len) + + return filename + + def new_source_star(self, source_id): + source = Source.query.get(source_id) + star = SourceStar(source, bool_or_none()) + db.session.add(star) + + def new_reply(self, journalist_id, source_id): + source = Source.query.get(source_id) + + # A source may have a null fid according to the DB, but this will + # break storage.path. + if source.filesystem_id is None: + return + + journalist = Journalist.query.get(journalist_id) + filename = self.fake_file(source.filesystem_id) + reply = Reply(journalist, source, filename) + db.session.add(reply) + + def new_journalist_login_attempt(self, journalist_id): + journalist = Journalist.query.get(journalist_id) + attempt = JournalistLoginAttempt(journalist) + attempt.timestamp = random_datetime(nullable=True) + db.session.add(attempt) + + def new_abandoned_submission(self, source_id): + '''For issue #1189''' + + source = Source.query.filter(Source.filesystem_id.isnot(None)).all()[0] + filename = self.fake_file(source.filesystem_id) + + # Use this as hack to create a real submission then swap out the + # source_id + submission = Submission(source, filename) + submission.source_id = source_id + db.session.add(submission) db.session.commit() + self.delete_source(source_id) - for _ in range(SOURCE_COUNT * multiplier): - new_source() - db.session.commit() + def delete_source(self, source_id): + '''For issue #1189''' + db.session.execute(text('DELETE FROM sources WHERE id = :source_id'), + {'source_id': source_id}) - for sid in range(1, SOURCE_COUNT * multiplier, 5): - for _ in range(1, multiplier + 1): - new_submission(config, sid) - db.session.commit() + def load(self): + with self.app.app_context(): + for _ in range(self.JOURNALIST_COUNT * self.multiplier): + self.new_journalist() + db.session.commit() - for sid in range(1, SOURCE_COUNT * multiplier, 5): - new_source_star(sid) - db.session.commit() + for _ in range(self.SOURCE_COUNT * self.multiplier): + self.new_source() + db.session.commit() - for jid in range(1, JOURNALIST_COUNT * multiplier, 10): - for sid in range(1, SOURCE_COUNT * multiplier, 10): - for _ in range(1, 3): - new_reply(config, jid, sid) - db.session.commit() + for sid in self.sources[0::5]: + for _ in range(1, self.multiplier + 1): + self.new_submission(sid) + db.session.commit() - for jid in range(1, JOURNALIST_COUNT * multiplier, 10): - new_journalist_login_attempt(jid) - db.session.commit() + for sid in self.sources[0::5]: + self.new_source_star(sid) + db.session.commit() + + for jid in self.journalists[0::10]: + for sid in self.sources[0::10]: + for _ in range(1, 3): + self.new_reply(jid, sid) + 
db.session.commit() + + for jid in self.journalists[0::10]: + self.new_journalist_login_attempt(jid) + db.session.commit() - for sid in range(SOURCE_COUNT * multiplier, - SOURCE_COUNT * multiplier + multiplier): - new_abandoned_submission(config, sid) + for sid in random.sample(self.sources, self.multiplier): + self.new_abandoned_submission(sid) def arg_parser(): @@ -234,7 +243,7 @@ def arg_parser(): def main(): args = arg_parser().parse_args() print('Loading data. This make take a while.') - load_data(sdconfig, args.multiplier) + QaLoader(sdconfig, args.multiplier).load() if __name__ == '__main__':
diff --git a/securedrop/tests/test_qa_loader.py b/securedrop/tests/test_qa_loader.py --- a/securedrop/tests/test_qa_loader.py +++ b/securedrop/tests/test_qa_loader.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -from qa_loader import load_data +from qa_loader import QaLoader def test_load_data(journalist_app, config): # Use the journalist_app fixture to init the DB - load_data(config, multiplier=1) + QaLoader(config, multiplier=1).load()
qa_loader.py creates records referencing pre-existing sources and journalists
## Description
`/var/www/securedrop/qa_loader.py` is used during testing to create large amounts of random test data. If there are pre-existing journalists and sources in the database, it may create random replies and submissions for them, confusing testing.
## Steps to Reproduce
- Install 0.8.0
- Create admin account via CLI on app server
- As source, submit message and record source codephrase
- As admin, reply to message
- Log in to app server and run
```
sudo su
cd /var/www/securedrop
./qa_loader.py --multiplier=1
```
- Upgrade to 0.9.0~rc1
- As previous source, check for replies
## Expected Behavior
Source sees original reply from journalist.
## Actual Behavior
Source sees reply from journalist preceded by blank replies.
## Comments
Screenshot attached. This is probably a low-priority bug, but it would make testing easier if it were fixed.
![screenshot from 2018-08-24 23-40-36](https://user-images.githubusercontent.com/2782952/44612989-807aad80-a7db-11e8-97b8-57a4fd67abc2.png)
The simple fix here is in QA to:
0. Create admin users
1. Run the qa loader
2. Submit document as source
Submitting the document as a source after running the QA loader sidesteps this issue - let's do that from now on.
I can fix this. We just need to keep track of the sources/docs/etc. that we create in lists and then use those in the following steps (see the sketch below). Also, it hasn't happened yet, but there's another bug where if you create a source then delete one, the loader will crap out because it uses `range`s to iterate over `id`s.
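A minimal sketch of that approach, assuming a SQLAlchemy session (helper names are hypothetical; the idea is to flush to obtain the new row's id, record it, and have later steps iterate only over recorded ids instead of `range(1, N)`):
```python
created_source_ids = []

def track_new_source(session, source):
    # Flushing assigns source.id without committing the transaction,
    # so the loader remembers exactly which rows it created.
    session.add(source)
    session.flush()
    created_source_ids.append(source.id)

# Later steps then touch only loader-created rows, e.g.:
# for sid in created_source_ids[0::5]:
#     new_submission(sid)
```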
2018-10-10T19:04:35Z
[]
[]
freedomofpress/securedrop
3,875
freedomofpress__securedrop-3875
[ "3862" ]
6539cf67f79976e695493c0add669b70aab7cdb9
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -62,6 +62,7 @@ def index(): # the Pocoo style guide, IMHO: # http://www.pocoo.org/internal/styleguide/ sources = Source.query.filter_by(pending=False) \ + .filter(Source.last_updated.isnot(None)) \ .order_by(Source.last_updated.desc()) \ .all() for source in sources:
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1942,3 +1942,20 @@ def test_col_process_successfully_unstars_sources(journalist_app, source = Source.query.get(test_source['id']) assert not source.star.starred + + +def test_source_with_null_last_updated(journalist_app, + test_journo, + test_files): + '''Regression test for issues #3862''' + + source = test_files['source'] + source.last_updated = None + db.session.add(source) + db.session.commit() + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + resp = app.get(url_for('main.index')) + assert resp.status_code == 200
Sources with a null value for last_updated will break the web UI
## Description
If a source is created without a `last_updated` field, it will cause the following error on the journalist index page.
```
172.17.0.1 - - [10/Oct/2018 18:49:40] "GET / HTTP/1.1" 500 -
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2309, in __call__
    return self.wsgi_app(environ, start_response)
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2295, in wsgi_app
    response = self.handle_exception(e)
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1741, in handle_exception
    reraise(exc_type, exc_value, tb)
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2292, in wsgi_app
    response = self.full_dispatch_request()
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1815, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1718, in handle_user_exception
    reraise(exc_type, exc_value, tb)
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1813, in full_dispatch_request
    rv = self.dispatch_request()
  File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1799, in dispatch_request
    return self.view_functions[rule.endpoint](**req.view_args)
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_app/main.py", line 79, in index
    starred=starred)
  File "/usr/local/lib/python2.7/dist-packages/flask/templating.py", line 135, in render_template
    context, ctx.app)
  File "/usr/local/lib/python2.7/dist-packages/flask/templating.py", line 117, in _render
    rv = template.render(context)
  File "/usr/local/lib/python2.7/dist-packages/jinja2/environment.py", line 1008, in render
    return self.environment.handle_exception(exc_info, True)
  File "/usr/local/lib/python2.7/dist-packages/jinja2/environment.py", line 780, in handle_exception
    reraise(exc_type, exc_value, tb)
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_templates/index.html", line 1, in top-level template code
    {% extends "base.html" %}
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_templates/base.html", line 50, in top-level template code
    {% block body %}{% endblock %}
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_templates/index.html", line 25, in block "body"
    {% include '_source_row.html' %}
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_templates/_source_row.html", line 4, in top-level template code
    <time class="date" title="{{ source.last_updated|rel_datetime_format }}" datetime="{{ source.last_updated|rel_datetime_format(fmt="%Y-%m-%d %H:%M:%S%Z") }}">{{ source.last_updated|rel_datetime_format(relative=True) }}</time>
  File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/template_filters.py", line 12, in rel_datetime_format
    time = dates.format_timedelta(datetime.utcnow() - dt,
TypeError: unsupported operand type(s) for -: 'datetime.datetime' and 'NoneType'
```
The root cause is that the `sqlite` schema permits null values in the `last_updated` column.
```sql CREATE TABLE sources ( id INTEGER NOT NULL, uuid VARCHAR(36) NOT NULL, filesystem_id VARCHAR(96), journalist_designation VARCHAR(255) NOT NULL, flagged BOOLEAN, last_updated DATETIME, -- ^ allows null :( pending BOOLEAN, interaction_count INTEGER NOT NULL, PRIMARY KEY (id), UNIQUE (uuid), UNIQUE (filesystem_id), CHECK (flagged IN (0, 1)), CHECK (pending IN (0, 1)) ); ``` ## Steps to Reproduce Manually create a source with `last_updated = null`. ## Expected Behavior The app should recover gracefully or (better) disallow this entirely. ## Actual Behavior The app blows up.
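A sketch of the graceful-recovery option, mirroring the fix in the patch above: exclude sources whose `last_updated` is NULL before the template ever tries to subtract it from a datetime (the model import path is an assumption):
```python
from models import Source  # assumed import path for the Source model

sources = (Source.query
           .filter_by(pending=False)
           .filter(Source.last_updated.isnot(None))
           .order_by(Source.last_updated.desc())
           .all())
```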
2018-10-14T10:14:33Z
[]
[]
freedomofpress/securedrop
3,893
freedomofpress__securedrop-3893
[ "3892" ]
8725cfc4a9b47368a11a91249691d35db87665cc
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -212,8 +212,9 @@ def delete(): history. """ - query = Reply.query.filter( - Reply.filename == request.form['reply_filename']) + query = Reply.query.filter_by( + filename=request.form['reply_filename'], + source_id=g.source.id) reply = get_one_or_else(query, current_app.logger, abort) reply.deleted_by_source = True db.session.add(reply)
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -5,7 +5,7 @@ import subprocess from cStringIO import StringIO -from flask import session, escape, current_app, url_for +from flask import session, escape, current_app, url_for, g from mock import patch, ANY import crypto_util @@ -647,3 +647,47 @@ def test_csrf_error_page(config, source_app): resp = app.post(url_for('main.create'), follow_redirects=True) text = resp.data.decode('utf-8') assert 'Your session timed out due to inactivity' in text + + +def test_source_can_only_delete_own_replies(source_app): + '''This test checks for a bug an authenticated source A could delete + replies send to source B by "guessing" the filename. + ''' + source0, codename0 = utils.db_helper.init_source() + source1, codename1 = utils.db_helper.init_source() + journalist, _ = utils.db_helper.init_journalist() + replies = utils.db_helper.reply(journalist, source0, 1) + filename = replies[0].filename + confirmation_msg = 'Reply deleted' + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data={'codename': codename1}, + follow_redirects=True) + assert resp.status_code == 200 + assert g.source.id == source1.id + + resp = app.post(url_for('main.delete'), + data={'reply_filename': filename}, + follow_redirects=True) + assert resp.status_code == 404 + assert confirmation_msg not in resp.data.decode('utf-8') + + reply = Reply.query.filter_by(filename=filename).one() + assert not reply.deleted_by_source + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data={'codename': codename0}, + follow_redirects=True) + assert resp.status_code == 200 + assert g.source.id == source0.id + + resp = app.post(url_for('main.delete'), + data={'reply_filename': filename}, + follow_redirects=True) + assert resp.status_code == 200 + assert confirmation_msg in resp.data.decode('utf-8') + + reply = Reply.query.filter_by(filename=filename).one() + assert reply.deleted_by_source
Source "X" can delete replies to source "Y" via the source interface # Steps to reproduce 1. Boot dev env 1. Create source X, submit 1. Open private tab, create source Y, submit 1. Login as a journalist, reply to both 1. In the `/lookup` page for source X, inspect the reply form and copy the hidden input `reply_filename` 1. In the `/lookup` page for source Y, alter the reply form to have the copied value, submit/confirm 1. Reload X's `/lookup` page and see that the reply is missing ## Expected Behavior X cannot delete Y's reply ## Actual Behavior X **can** delete Y's reply # Relevant code `securedrop/source_app/main.py` ```python @view.route('/delete', methods=('POST',)) @login_required def delete(): """This deletes the reply from the source's inbox, but preserves the history for journalists such that they can view conversation history. """ query = Reply.query.filter( Reply.filename == request.form['reply_filename']) reply = get_one_or_else(query, current_app.logger, abort) reply.deleted_by_source = True db.session.add(reply) db.session.commit() flash(gettext("Reply deleted"), "notification") return redirect(url_for('.lookup')) ``` Looking at the `Reply.query.filter`, you can see that we do not check that a given response belongs to a source. # `git blame` Not blaming, but listing it here for the eventual report to save y'all some time. This was introduced in commit `83d2329b77` on 2015-02-15. # Analysis Any authenticated source can delete replies belonging to any other source. However, they must know the filename, but we don't throttle anything on the source interface so a determined attacker could in theory disrupt source/journalist communication. Estimated entropy of the filename: ``` >>> # num * nouns * adjectives * {doc,msg} >>> math.log(100 *17949 * 8222 * 2, 2) 34.780745694714774 ``` It seems feasible that a determined attacker could disrupt communications with some frequency on a high traffic instance.
2018-10-22T17:32:24Z
[]
[]
freedomofpress/securedrop
3,894
freedomofpress__securedrop-3894
[ "3892" ]
97b99dfe1cc3bcb2f575418a8da6a771d27ec874
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -212,8 +212,9 @@ def delete(): history. """ - query = Reply.query.filter( - Reply.filename == request.form['reply_filename']) + query = Reply.query.filter_by( + filename=request.form['reply_filename'], + source_id=g.source.id) reply = get_one_or_else(query, current_app.logger, abort) reply.deleted_by_source = True db.session.add(reply)
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -5,7 +5,7 @@ import subprocess from cStringIO import StringIO -from flask import session, escape, current_app, url_for +from flask import session, escape, current_app, url_for, g from mock import patch, ANY import crypto_util @@ -647,3 +647,47 @@ def test_csrf_error_page(config, source_app): resp = app.post(url_for('main.create'), follow_redirects=True) text = resp.data.decode('utf-8') assert 'Your session timed out due to inactivity' in text + + +def test_source_can_only_delete_own_replies(source_app): + '''This test checks for a bug an authenticated source A could delete + replies send to source B by "guessing" the filename. + ''' + source0, codename0 = utils.db_helper.init_source() + source1, codename1 = utils.db_helper.init_source() + journalist, _ = utils.db_helper.init_journalist() + replies = utils.db_helper.reply(journalist, source0, 1) + filename = replies[0].filename + confirmation_msg = 'Reply deleted' + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data={'codename': codename1}, + follow_redirects=True) + assert resp.status_code == 200 + assert g.source.id == source1.id + + resp = app.post(url_for('main.delete'), + data={'reply_filename': filename}, + follow_redirects=True) + assert resp.status_code == 404 + assert confirmation_msg not in resp.data.decode('utf-8') + + reply = Reply.query.filter_by(filename=filename).one() + assert not reply.deleted_by_source + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data={'codename': codename0}, + follow_redirects=True) + assert resp.status_code == 200 + assert g.source.id == source0.id + + resp = app.post(url_for('main.delete'), + data={'reply_filename': filename}, + follow_redirects=True) + assert resp.status_code == 200 + assert confirmation_msg in resp.data.decode('utf-8') + + reply = Reply.query.filter_by(filename=filename).one() + assert reply.deleted_by_source
Source "X" can delete replies to source "Y" via the source interface # Steps to reproduce 1. Boot dev env 1. Create source X, submit 1. Open private tab, create source Y, submit 1. Login as a journalist, reply to both 1. In the `/lookup` page for source X, inspect the reply form and copy the hidden input `reply_filename` 1. In the `/lookup` page for source Y, alter the reply form to have the copied value, submit/confirm 1. Reload X's `/lookup` page and see that the reply is missing ## Expected Behavior X cannot delete Y's reply ## Actual Behavior X **can** delete Y's reply # Relevant code `securedrop/source_app/main.py` ```python @view.route('/delete', methods=('POST',)) @login_required def delete(): """This deletes the reply from the source's inbox, but preserves the history for journalists such that they can view conversation history. """ query = Reply.query.filter( Reply.filename == request.form['reply_filename']) reply = get_one_or_else(query, current_app.logger, abort) reply.deleted_by_source = True db.session.add(reply) db.session.commit() flash(gettext("Reply deleted"), "notification") return redirect(url_for('.lookup')) ``` Looking at the `Reply.query.filter`, you can see that we do not check that a given response belongs to a source. # `git blame` Not blaming, but listing it here for the eventual report to save y'all some time. This was introduced in commit `83d2329b77` on 2015-02-15. # Analysis Any authenticated source can delete replies belonging to any other source. However, they must know the filename, but we don't throttle anything on the source interface so a determined attacker could in theory disrupt source/journalist communication. Estimated entropy of the filename: ``` >>> # num * nouns * adjectives * {doc,msg} >>> math.log(100 *17949 * 8222 * 2, 2) 34.780745694714774 ``` It seems feasible that a determined attacker could disrupt communications with some frequency on a high traffic instance.
2018-10-22T17:35:56Z
[]
[]
freedomofpress/securedrop
3,902
freedomofpress__securedrop-3902
[ "3849" ]
55f934cb5ab7072ff574b2dfed86c90162cd050d
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.10.0~rc1' +version = '0.11.0~rc1' # The full version, including alpha/beta/rc tags. -release = '0.10.0~rc1' +release = '0.11.0~rc1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.10.0~rc1' +__version__ = '0.11.0~rc1'
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder/tests/vars.yml @@ -1,5 +1,5 @@ --- -securedrop_version: "0.10.0~rc1" +securedrop_version: "0.11.0~rc1" ossec_version: "3.0.0" keyring_version: "0.1.2" config_version: "0.1.1"
Release SecureDrop 0.10.0 This is a tracking issue for the upcoming release of SecureDrop 0.10.0 - tasks may get added or modified. **Feature freeze:** October 9, 2018 **String freeze:** October 16, 2018 **Pre-release announcement:** October 16, 2018 **Release date:** October 23, 2018 _SecureDrop maintainers and testers:_ As you QA 0.10.0, please report back your testing results as comments on this ticket. File GitHub issues for any problems found, tag them "QA: Release", and associate them with the 0.10.0 milestone for tracking (or ask a maintainer to do so). Test debian packages will be posted on https://apt-test.freedom.press signed with [the test key](https://gist.githubusercontent.com/conorsch/ec4008b111bc3142fca522693f3cce7e/raw/2968621e8ad92db4505a31fcc5776422d7d26729/apt-test%2520apt%2520pubkey). An Ansible playbook testing the upgrade path is [here](https://gist.github.com/conorsch/e7556624df59b2a0f8b81f7c0c4f9b7d). # Prepare release candidate (0.10.0~rc1) - [x] Prepare 0.10.0-rc1 release changelog - @redshiftzero - [x] Branch off 0.10.0~rc1 into release/0.10.0 - @redshiftzero - [skipped] Build debs (including new Tor package) and put up `0.10.0~rc1` on test apt server We merged in two important fixes prior to building debs for `rc1` so stopped here # Prepare release candidate (0.10.0~rc2) - [x] Prepare 0.10.0-rc2 release changelog - @redshiftzero - [x] Prepare test plan for 0.10.0~rc2 - @zenmonkeykstop and @redshiftzero - [x] Push 0.10.0-rc2 tag - @redshiftzero - [x] Build debs (including new Tor package) and put up `0.10.0~rc2` on test apt server - @emkll # Prepare release candidate (0.10.0~rc3) - [x] Prepare 0.10.0-rc3 release changelog - @redshiftzero - [x] Prepare test plan for 0.10.0~rc3 - @emkll - [x] Push 0.10.0-rc3 tag - @redshiftzero - [x] Build debs and put up `0.10.0~rc3` on test apt server - @emkll # Prepare release candidate (0.10.0~rc4) - [x] Prepare 0.10.0-rc4 release changelog - @redshiftzero - [x] Prepare test plan for 0.10.0~rc4 @zenmonkeykstop @emkll - [x] Push 0.10.0-rc4 tag - @redshiftzero - [x] Build debs and put up `0.10.0~rc4` on test apt server - @emkll [we will add checklists for rc5 release candidates and later as we proceed through the release process] Test plan to be posted during business hours Pacific time on Wednesday, October 10th. # [QA Matrix for 0.10.0](https://docs.google.com/spreadsheets/d/1IdW1DITWBvacU-eNp2IlJJdosEuYr4XTWiplQeJu-Lw/edit#gid=0) # [Test Plan for 0.10.0](https://github.com/freedomofpress/securedrop/wiki/0.10.0-Test-Plan) After each test, please update the QA matrix and post details for Basic Server Testing, Application Acceptance Testing and 0.10.0-specific testing below in comments to this ticket. # Final release - [x] Ensure builder in release branch is updated and/or update builder image - @emkll or @msheiny - [x] Merge final translations - @kushaldas - [x] Push signed tag - @redshiftzero - [x] Build final Debian packages for 0.10.0 - @conorsch - [x] Upload Debian packages (including new `linux-image`, `linux-firmware`, and `tor` packages) to apt test - @conorsch - [x] Pre-Flight: Test install and upgrade of 0.10.0 works w/ prod repo debs, test updater logic in Tails - @zenmonkeykstop - [x] Write and send release announcement - @eloquence - [x] Publish blog post about 0.10.0 Debian package release and instructions for admins - @eloquence # Post release - [x] Merge changelog (i.e. rc commits) back to `develop` - @redshiftzero - [x] Bump version on `develop` in prep for 0.11.0 release - @redshiftzero
2018-10-23T21:14:36Z
[]
[]
freedomofpress/securedrop
3,909
freedomofpress__securedrop-3909
[ "3702" ]
9b4924cfa466a45d00804d9c266284caa064d7d6
diff --git a/molecule/testinfra/staging/conftest.py b/molecule/testinfra/staging/conftest.py --- a/molecule/testinfra/staging/conftest.py +++ b/molecule/testinfra/staging/conftest.py @@ -31,50 +31,9 @@ def securedrop_import_testinfra_vars(hostname, with_header=False): if with_header: hostvars = dict(securedrop_test_vars=hostvars) - if os.environ.get("FPF_CI", False): - export_ci_var_overrides() - return hostvars -def export_ci_var_overrides(): - """ - In CI environments, the hardcoded local IP addresses aren't valid - (since we're testing against remote AWS hosts). Detect the CI env, - and look up the proper IPs for use in the testinfra tests. - Expose those IPs as environment variables, so the tests can use them. - """ - molecule_info = lookup_molecule_info() - app_ip = lookup_aws_private_address(molecule_info, 'app-staging') - mon_ip = lookup_aws_private_address(molecule_info, 'mon-staging') - - os.environ['APP_IP'] = app_ip - os.environ['MON_IP'] = mon_ip - - # Make SSH calls more resilient, as we're operating against remote hosts, - # and running from CI. We've observed flakey connections in CI at times. - os.environ['ANSIBLE_SSH_RETRIES'] = '5' - ssh_args = [ - "-o ConnectTimeout=60s", - "-o ControlMaster=auto", - "-o ControlPersist=180s", - "-o StrictHostKeyChecking=no", - ] - os.environ['ANSIBLE_SSH_ARGS'] = " ".join(ssh_args) - - -def lookup_aws_private_address(molecule_info, hostname): - """ - Inspect Molecule instance config dict (imported from YAML file), - and return the attribute for the requested hostname. - """ - - host_info = list(filter(lambda x: x['instance'] == hostname, - molecule_info))[0] - host_ip = host_info['priv_address'] - return host_ip - - def lookup_molecule_info(): """ Molecule automatically writes YAML files documenting dynamic host info
diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst --- a/docs/development/testing_continuous_integration.rst +++ b/docs/development/testing_continuous_integration.rst @@ -14,9 +14,9 @@ the scripts in ``devops/``. You may want to consult the to interpret the configuration file. Review the ``workflows`` section of the configuration file to understand which jobs are run by CircleCI. -The files under ``devops/`` are used to create a minimized staging environment -on AWS EC2. The CircleCI host is used as the Ansible controller to provision the -machines and run the :ref:`tests <config_tests>` against them. +The files under ``devops/`` are used to create a libvirt-compatible environment on GCE. +The GCE host is used as the Ansible controller, mimicking a developer's laptop, +to provision the machines and run the :ref:`tests <config_tests>` against them. .. note:: We skip unnecessary jobs, such as the staging run, for pull requests that only affect the documentation; to do so, we check whether the branch name begins with @@ -33,17 +33,6 @@ machines and run the :ref:`tests <config_tests>` against them. branch. Once your branch is in a PR, you can rebuild, push an additional commit, or manually rebase your branch to update the CI results. -Limitations of the CI Staging Environment ------------------------------------------ -Our CI staging environment is currently directly provisioned to Xen-based -virtual machines running on AWS, due to the lack of support for nested -virtualization. This means that we cannot use the ``grsecurity`` kernel, and the -environment differs from our locally run staging VMs in other ways. We may be -able to overcome these limitations by -`transitioning <https://github.com/freedomofpress/securedrop/issues/3702>`__ to -running the CI staging environment on compute infrastructure that supports -nested virtualization. - Running the CI Staging Environment ---------------------------------- @@ -53,12 +42,12 @@ of the ``freedomofpress`` GitHub organization). The tests also perform basic linting and validation, like checking for formatting errors in the Sphinx documentation. -.. tip:: You will need an Amazon Web Services EC2 account to proceed. - See the `AWS Getting Started Guide`_ for detailed instructions. +.. tip:: You will need a Google Cloud Platform account to proceed. + See the `Google Cloud Platform Getting Started Guide`_ for detailed instructions. -.. _AWS Getting Started Guide: https://aws.amazon.com/ec2/getting-started/ +.. _Google Cloud Platform Getting Started Guide: https://cloud.google.com/getting-started/ -In addition to an EC2 account, you will need a working `Docker installation`_ in +In addition to a GCP account, you will need a working `Docker installation`_ in order to run the container that builds the deb packages. You can verify that your Docker installation is working by running @@ -82,11 +71,12 @@ Source the setup script using the following command: .. code:: sh - source ./devops/scripts/local-setup.sh + source ./devops/gce-nested/ci-env.sh You will be prompted for the values of the required environment variables. There -are some defaults set that you may want to change. You will need to determine -the value of your VPC ID to use which is outside the scope of this guide. +are some defaults set that you may want to change. You will need to export +``GOOGLE_CREDENTIALS`` with authentication details for your GCP account, +which is outside the scope of this guide. 
Use Makefile to Provision Hosts ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -98,28 +88,18 @@ Run ``make help`` to see the full list of CI commands in the Makefile: $ make help Makefile for developing and testing SecureDrop. Subcommands: - docs: Build project documentation in live reload for editing. - docs-lint: Check documentation for common syntax errors. - ci-spinup: Creates AWS EC2 hosts for testing staging environment. - ci-teardown: Destroy AWS EC2 hosts for testing staging environment. - ci-run: Provisions AWS EC2 hosts for testing staging environment. - ci-test: Tests AWS EC2 hosts for testing staging environment. - ci-go: Creates, provisions, tests, and destroys AWS EC2 hosts - for testing staging environment. - ci-debug: Prevents automatic destruction of AWS EC2 hosts on error. + ci-go Creates, provisions, tests, and destroys GCE host for testing staging environment. + ci-lint Runs linting in linting container. + ci-run Provisions GCE host for testing staging environment. + ci-spinup Creates GCE host for testing staging environment. + ci-teardown Destroys GCE host for testing staging environment. To run the tests locally: .. code:: sh - make ci-debug # hosts will not be destroyed automatically make ci-go You can use ``make ci-run`` to provision the remote hosts while making changes, including rebuilding the Debian packages used in the Staging environment. See :doc:`virtual_environments` for more information. - -Note that if you typed ``make ci-debug`` above, you will have to manually remove -a blank file in ``${HOME}/.FPF_CI_DEBUG`` and then run ``make ci-teardown`` to -bring down the CI environment. Otherwise, specifically for AWS, you will be -charged hourly charges until those machines are terminated. diff --git a/molecule/aws/securedrop_test.pub b/molecule/aws/securedrop_test.pub deleted file mode 100644 --- a/molecule/aws/securedrop_test.pub +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN PGP PUBLIC KEY BLOCK----- - -mQENBFhPGZsBCACzn00s3+i5HdGIldDGYXxY2HKL9Qhk0DhiRrNPaQemhNijuFlC -geCeKN/smDAUyM5mfEoxmWy3V7n8SEQUpqI4dIS2AohReLkyKEKiIpTuXW7F9kO3 -vcXHgrTka+8B4ZQxDuTHNFJLmBwJnP24LrL6BzkDIUNeQFwM0EFTDOJlW1QV6qkm -9WGizo2sR0VBJJabfRWrTWd8llYOVcc+LptErVNADPaX6iqb+QnZVJ/nYmCTgABj -lD3aZ4EPZ+ioVOcOxbgBkAX76COObUUw/XahBGwj4fJ5kyzvDSBCHHlRzN39LKpM -Y+HfSc1scAOWN+Dd0N/joIa0j0U4SGHo1NdzABEBAAG0MVNlY3VyZURyb3AgVEVT -VElORyBrZXkgPHNlY3VyZWRyb3BAZnJlZWRvbS5wcmVzcz6JAU4EEwEIADgWIQRO -15zDNi19EoNwRgJKO+SpIhGwPAUCWE8ZmwIbAwULCQgHAgYVCAkKCwIEFgIDAQIe -AQIXgAAKCRBKO+SpIhGwPCb9B/9SuVoxbe3nLlU0bHDQtoq5P7adyTZK+5gKIiAo -mtAkc/EuiF6jYIDLo+DBB1GBJVjyD5igTt14XR3JpMe6nLtztD5zgGk47gYQk3y5 -6f5ydd7zRo9OxulRYDvU1mXMUc0EmqfzuSxY55HJy5KQvjeKIU0fTvwbPYXdhFCC -42iyBIkp4e4/C5oO4lNrNY2DJEZ+a8H5LHasJ4g9A78f/D5q0HWO1HutzfDeiMvq -WFwlGMD2OzTEQA2MGlVRIYvLHAG1aV9fXY8kjCFT8ri5hxlQeTkKISfbW3pFSq6s -Ow4r975zWLTPJNm+WTbBpfIOFBVAW34EHkcb/QmntlvqkNM+uQENBFhPGZsBCAC4 -VEtCQEuZ3WzCNL/0yQFih1EjT/AsS3j3++xvSOYWF+c7AjR9X0MkJFTnUZBHs6MX -PM33bbkWbBBE2ILdDCEF72Uc5HyyC2lW2DvPY9ZLVSGcMCUsKARv5rbeNdgiLVP5 -8AMkmG48q0Pxrr6UVX14M34Jm5G91c/dj9zHtVwkLg4RG/rcumQdlpQhNmMycB2X -lat48atmEkutfLEQizXIlgiCdNEpgfUBy/jZZcCOjwr8PUPmSUWjKOVMv6CSLx8K -z2cP4We7tyq4qhc0cWjJOWOmJpu5tbmi6XEEWGaIJyN+POhHEcb0tI1rTJ88nrMb -DI/NF/35kuWIIkADOb2vABEBAAGJATYEGAEIACAWIQRO15zDNi19EoNwRgJKO+Sp -IhGwPAUCWE8ZmwIbDAAKCRBKO+SpIhGwPC3fB/0TfuScS718FiEcVRI3F2wBbzTQ -VARhGzEvPSU5Z3Cur/EB8ihpWvwi39tUMeg5HTheDl/8A7f1QCjIFSVEr1slGNLh -YFF07XGWhy837z6kiihK2z6/w6Q9QJqjE+QVZCKr97aIPejvEoHoslZTU5pJ52qF -J7KQd1hEvVs00DxY6VlyK0FzXqByKYq6Arl2tzlCZ6RPEHKXV2xSP06jLEagzgYe 
-DylVo9Xahenj4n/Mtq7Am6tGgU9Vy9cGbWNBdUND/mFQEEZSh9RJabPeluH12sir -5/tfsDr4DGHSz7ws+5M6Zbk6oNJEwQZ4cR+81qCfXE5X5LW1KlAL8wDl7dfS -=fYUi ------END PGP PUBLIC KEY BLOCK----- \ No newline at end of file diff --git a/molecule/aws/tests/test_tor_interfaces.py b/molecule/aws/tests/test_tor_interfaces.py deleted file mode 100644 --- a/molecule/aws/tests/test_tor_interfaces.py +++ /dev/null @@ -1,35 +0,0 @@ -import io -import os -import re -import pytest - -TOR_URL_FILES = [{'file': 'app-source-ths', - 'check_string': 'SUBMIT DOCUMENTS', - 'error_string': "ERROR"}] - -testinfra_hosts = ["docker://apptestclient"] - - [email protected]('site', TOR_URL_FILES) -def test_www(host, site): - """ - Ensure tor interface is reachable and returns expected content. - """ - - # Extract Onion URL from saved onion file, fetched back from app-staging. - onion_url_filepath = os.path.join( - os.path.dirname(__file__), - "../../../install_files/ansible-base/{}".format(site['file']) - ) - onion_url_raw = io.open(onion_url_filepath, 'r').read() - onion_url = re.search("\w+\.onion", onion_url_raw).group() - - # Fetch Onion URL via curl to confirm interface is rendered correctly. - curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( - onion_url) - curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) - - site_scrape = host.check_output(curl_tor) - assert host.check_output(curl_tor_status) == "200" - assert site['check_string'] in site_scrape - assert site['error_string'] not in site_scrape diff --git a/molecule/aws/tor_apt_test.yml b/molecule/aws/tor_apt_test.yml deleted file mode 100644 --- a/molecule/aws/tor_apt_test.yml +++ /dev/null @@ -1,26 +0,0 @@ ---- -- name: Add apt SD test public key - apt_key: - data: "{{ lookup('file','securedrop_test.pub') }}" - state: present - -- name: Force possible tor update - meta: flush_handlers - -- name: Squash testinfra failure for packages needing update - apt: - upgrade: safe - -- name: Extract latest tor version - shell: | - apt-cache policy tor | sed -e 's/^\s*Installed:\ \(\S*\)/\1/g;tx;d;:x' - changed_when: false - register: extract_tor_version - -- name: Dump Tor version to file (for reporting) - copy: - dest: "{{ playbook_dir }}/../../.tor_version" - content: "{{ extract_tor_version.stdout }}" - delegate_to: localhost - run_once: true - become: "no" diff --git a/molecule/builder/tests/test_security_updates.py b/molecule/builder/tests/test_security_updates.py --- a/molecule/builder/tests/test_security_updates.py +++ b/molecule/builder/tests/test_security_updates.py @@ -1,11 +1,6 @@ -import os -import pytest - testinfra_hosts = ['docker://trusty-sd-sec-update'] [email protected](os.environ.get("FPF_CI", "false") == "true", - reason="Skip in CI, only fail this test locally") def test_ensure_no_updates_avail(host): """ Test to make sure that there are no security-updates in the diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -1,5 +1,4 @@ import pytest -import os import re @@ -16,8 +15,6 @@ def test_ssh_motd_disabled(File): assert not f.contains("pam\.motd") [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize("package", [ 'intel-microcode', 'linux-firmware-image-{}-grsec'.format(KERNEL_VERSION), @@ -34,8 +31,6 @@ def test_grsecurity_apt_packages(Package, 
package): assert Package(package).is_installed [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize("package", [ 'linux-signed-image-generic-lts-utopic', 'linux-signed-image-generic', @@ -61,8 +56,6 @@ def test_generic_kernels_absent(Command, package): assert c.stderr == error_text [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") def test_grsecurity_lock_file(File): """ Ensure system is rerunning a grsecurity kernel by testing for the @@ -74,8 +67,6 @@ def test_grsecurity_lock_file(File): assert f.size == 0 [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") def test_grsecurity_kernel_is_running(Command): """ Make sure the currently running kernel is specific grsec kernel. @@ -85,8 +76,6 @@ def test_grsecurity_kernel_is_running(Command): assert c.stdout == '{}-grsec'.format(KERNEL_VERSION) [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize('sysctl_opt', [ ('kernel.grsecurity.grsec_lock', 1), ('kernel.grsecurity.rwxmap_logging', 0), @@ -101,8 +90,6 @@ def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt): assert Sysctl(sysctl_opt[0]) == sysctl_opt[1] [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize('paxtest_check', [ "Executable anonymous mapping", "Executable bss", @@ -137,8 +124,6 @@ def test_grsecurity_paxtest(Command, Sudo, paxtest_check): assert re.search(regex, c.stdout) [email protected](os.environ.get('FPF_CI', 'false') == "true", - reason="Not needed in CI environment") def test_grub_pc_marked_manual(Command): """ Ensure the `grub-pc` packaged is marked as manually installed. @@ -149,8 +134,6 @@ def test_grub_pc_marked_manual(Command): assert c.stdout == "grub-pc" [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") def test_apt_autoremove(Command): """ Ensure old packages have been autoremoved. @@ -160,8 +143,8 @@ def test_apt_autoremove(Command): assert "The following packages will be REMOVED" not in c.stdout [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") [email protected](strict=True, + reason="PaX flags unset at install time, see issue #3916") @pytest.mark.parametrize("binary", [ "/usr/sbin/grub-probe", "/usr/sbin/grub-mkdevicemap", diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -1,4 +1,3 @@ -import os import re @@ -72,8 +71,6 @@ def test_sudoers_tmux_env_deprecated(File): """ admin_user = "vagrant" - if os.environ.get("FPF_CI", None): - admin_user = "sdrop" f = File("/home/{}/.bashrc".format(admin_user)) assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$")
Switch CircleCI staging environment to GCE + libvirt VMs ## Description Our staging environment is currently directly provisioned to Xen-based VMs running on AWS, due to the lack of support for nested virtualization. That means we can't use the `grsecurity` kernel, and the environment differs from our locally run staging VMs in other ways. Moreover, we can't add support for new tests such as upgrade testing (#1689). We've previously considered workarounds (see #2892), but we now have established that we can use GCE's nested-virtualization capabilities (see #3452). The next logical step will be to switch the staging environment spun up by CircleCI to GCE running our libvirt-based staging VMs. ## User Stories As a developer, I want CI results to reflect the preferred staging environment as much as possible, so that bugs and errors are caught early in development.
In the current sprint (which ends BOD PDT 10/17), @msheiny will add the GCE configuration as an infrastructure prerequisite for this, which is tracked in our infra repository. For the next sprint, we will likely aim to transition the existing tests to staging VMs run in GCE via nested virtualization (still managed via CircleCI), giving us the immediate benefit of grsecurity kernels. For a future sprint, we'll then move on to automated upgrade testing (#1689).
For this current sprint (10/17-10/31), the top-level description describes the scope of what we're targeting; @msheiny will drive, with @conorsch likely being the primary reviewer.
2018-10-30T19:11:33Z
[]
[]
freedomofpress/securedrop
3,917
freedomofpress__securedrop-3917
[ "3915" ]
18f9063815fc71e7565fa4c1a66106f64c3926eb
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -244,7 +244,8 @@ def all_source_replies(source_uuid): db.session.add(reply) db.session.add(source) db.session.commit() - return jsonify({'message': 'Your reply has been stored'}), 201 + return jsonify({'message': 'Your reply has been stored', + 'uuid': reply.uuid}), 201 @api.route('/sources/<source_uuid>/replies/<reply_uuid>', methods=['GET', 'DELETE'])
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -4,6 +4,7 @@ import os from pyotp import TOTP +from uuid import UUID from flask import current_app, url_for from itsdangerous import TimedJSONWebSignatureSerializer @@ -641,10 +642,14 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, headers=get_api_headers(journalist_api_token)) assert response.status_code == 201 - with journalist_app.app_context(): # Now verify everything was saved. - # Get most recent reply in the database - reply = Reply.query.order_by(Reply.id.desc()).first() + # ensure the uuid is present and valid + reply_uuid = UUID(response.json['uuid']) + + # check that the uuid has a matching db object + reply = Reply.query.filter_by(uuid=str(reply_uuid)).one_or_none() + assert reply is not None + with journalist_app.app_context(): # Now verify everything was saved. assert reply.journalist_id == test_journo['id'] assert reply.source_id == source_id
Posting a Reply should return a UUID ## Description The API should not return 204, but should return 200 with an object containing the UUID of the reply. Without doing that, there is no way for a client to sanely sync its local copy of a conversation with the server's copy.
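For illustration, here is a minimal client-side sketch of the desired behaviour, based on the response shape in the patch above (`message` plus `uuid`). The host, token format, and source UUID are placeholders, and `requests` is assumed to be available:

```python
import json
import uuid

import requests  # assumed third-party HTTP client

API = "http://localhost:8081/api/v1"  # placeholder host
HEADERS = {
    "Authorization": "Token <journalist-api-token>",  # placeholder token
    "Content-Type": "application/json",
    "Accept": "application/json",
}
SOURCE_UUID = "<source-uuid>"  # placeholder source

resp = requests.post(
    "{}/sources/{}/replies".format(API, SOURCE_UUID),
    headers=HEADERS,
    data=json.dumps({
        "reply": "-----BEGIN PGP MESSAGE-----\n...\n-----END PGP MESSAGE-----",
    }),
)

# With the fix, a stored reply yields 201 and a body containing the
# reply's UUID, which the client can record locally for later syncing.
assert resp.status_code == 201
reply_uuid = uuid.UUID(resp.json()["uuid"])  # raises ValueError if malformed
print("Server stored reply as", reply_uuid)
```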
2018-11-02T12:32:04Z
[]
[]
freedomofpress/securedrop
3,919
freedomofpress__securedrop-3919
[ "3918" ]
18f9063815fc71e7565fa4c1a66106f64c3926eb
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -1,9 +1,10 @@ -from datetime import datetime, timedelta -from functools import wraps import json -from werkzeug.exceptions import default_exceptions # type: ignore +from datetime import datetime, timedelta from flask import abort, Blueprint, current_app, jsonify, request +from functools import wraps +from os import path +from werkzeug.exceptions import default_exceptions # type: ignore from db import db from journalist_app import utils @@ -238,9 +239,10 @@ def all_source_replies(source_uuid): return jsonify( {'message': 'You must encrypt replies client side'}), 400 - reply = Reply(user, source, - current_app.storage.path(source.filesystem_id, - filename)) + # issue #3918 + filename = path.basename(filename) + + reply = Reply(user, source, filename) db.session.add(reply) db.session.add(source) db.session.commit()
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -648,6 +648,9 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, assert reply.journalist_id == test_journo['id'] assert reply.source_id == source_id + # regression test for #3918 + assert '/' not in reply.filename + source = Source.query.get(source_id) expected_filename = '{}-{}-reply.gpg'.format(
API response for a `Reply` has full system path for `filename`

## Description

The filename of a reply to a source should only be the `$(basename $path)` and not the full `$path`.

## Steps to Reproduce

Boot the dev env, reply via the API, fetch all replies from the API.

## Expected Behavior

The response does not contain `/var/lib/securedrop/store/$filesystem_id`.

## Actual Behavior

```json
$ http localhost:8081/api/v1/replies
HTTP/1.0 200 OK
Content-Length: 4438
Content-Type: application/json
Date: Fri, 02 Nov 2018 13:58:11 GMT
Server: Werkzeug/0.14.1 Python/2.7.6
Set-Cookie: js=eyJleHBpcmVzIjp7IiBkIjoiRnJpLCAwMiBOb3YgMjAxOCAxNTo1ODoxMSBHTVQifX0.Dr3pcw.p_VpbLQcvnnVzg_jf6MTp9gZ8VI; HttpOnly; Path=/
Vary: Cookie

{
    "replies": [
        ...
        {
            "filename": "/var/lib/securedrop/store/V7ND6HFB3ZDSWS647VA2HPTTTOKD6M433M6ZSZOKTQTQAU7RWTWS5YU62F4WIASHUYTDAYW3HZVNORQAN3QKZVBZRELYCV3NUVARL6Y=/5-fourteen_recitative-reply.gpg",
            "is_deleted_by_source": false,
            "journalist_username": "journalist",
            "journalist_uuid": "3328e4e8-4037-437b-88d9-a0aa270090cb",
            "reply_url": "/api/v1/sources/c81a50ea-4eeb-4422-ba51-9bc11e598b14/replies/8b97e43d-e84d-412b-b7e2-1fccfa0fda17",
            "size": 906,
            "source_url": "/api/v1/sources/c81a50ea-4eeb-4422-ba51-9bc11e598b14",
            "uuid": "8b97e43d-e84d-412b-b7e2-1fccfa0fda17"
        },
        ...
    ]
}
```
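The fix (see the patch above) reduces to taking `os.path.basename` of the stored filename before creating the `Reply` record. A small sketch, reusing the path from the example response above:

```python
from os import path

# Full path as returned by the buggy API, taken from the example above.
full_path = (
    "/var/lib/securedrop/store/"
    "V7ND6HFB3ZDSWS647VA2HPTTTOKD6M433M6ZSZOKTQTQAU7RWTWS5YU62F4WIASHUYTDAYW3HZVNORQAN3QKZVBZRELYCV3NUVARL6Y="
    "/5-fourteen_recitative-reply.gpg"
)

# basename keeps only the final component, dropping the store directory
# and the per-source filesystem id.
assert path.basename(full_path) == "5-fourteen_recitative-reply.gpg"

# A bare filename passes through unchanged, so applying basename
# unconditionally is safe.
assert path.basename("5-fourteen_recitative-reply.gpg") == \
    "5-fourteen_recitative-reply.gpg"
```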
2018-11-02T14:30:12Z
[]
[]
freedomofpress/securedrop
3,940
freedomofpress__securedrop-3940
[ "2332" ]
95c97ac906b15de9b626cca14bc3bdc19c644089
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -84,7 +84,7 @@ def add_user(): else: flash(gettext("An error occurred saving this user" " to the database." - " Please inform your administrator."), + " Please inform your admin."), "error") current_app.logger.error("Adding user " "'{}' failed: {}".format( diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py --- a/securedrop/journalist_app/decorators.py +++ b/securedrop/journalist_app/decorators.py @@ -12,7 +12,7 @@ def admin_required(func): def wrapper(*args, **kwargs): if logged_in() and g.user.is_admin: return func(*args, **kwargs) - flash(gettext("Only administrators can access this page."), + flash(gettext("Only admins can access this page."), "notification") return redirect(url_for('main.index')) return wrapper diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -117,7 +117,7 @@ def reply(): except Exception as exc: flash(gettext( "An unexpected error occurred! Please " - "inform your administrator."), "error") + "inform your admin."), "error") # We take a cautious approach to logging here because we're dealing # with responses to sources. It's possible the exception message # could contain information we don't want to write to disk. diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -47,7 +47,7 @@ def commit_account_changes(user): except Exception as e: flash(gettext( "An unexpected error occurred! Please " - "inform your administrator."), "error") + "inform your admin."), "error") current_app.logger.error("Account changes for '{}' failed: {}" .format(user, e)) db.session.rollback() @@ -137,7 +137,7 @@ def validate_hotp_secret(user, otp_secret): else: flash(gettext( "An unexpected error occurred! " - "Please inform your administrator."), "error") + "Please inform your admin."), "error") current_app.logger.error( "set_hotp_secret '{}' (id {}) failed: {}".format( otp_secret, user.id, e)) diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -219,7 +219,7 @@ def _get_delete_confirmation(user): def delete_user(args): - """Deletes a journalist or administrator from the application.""" + """Deletes a journalist or admin from the application.""" with app_context(): username = _get_username_to_delete() try:
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -377,7 +377,7 @@ def _edit_account(self): # There's no field to change your username. with pytest.raises(NoSuchElementException): self.driver.find_element_by_css_selector('#username') - # There's no checkbox to change the administrator status of your + # There's no checkbox to change the admin status of your # account. with pytest.raises(NoSuchElementException): self.driver.find_element_by_css_selector('#is-admin') @@ -410,7 +410,7 @@ def _edit_user(self, username): # out with the user's username. username_field = self.driver.find_element_by_css_selector('#username') assert username_field.get_attribute('placeholder') == username - # There's a checkbox to change the administrator status of the user and + # There's a checkbox to change the admin status of the user and # it's already checked appropriately to reflect the current status of # our user. username_field = self.driver.find_element_by_css_selector('#is-admin') diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -120,7 +120,7 @@ def test_reply_error_flashed_message(journalist_app, test_journo, test_source): ins.assert_message_flashed( 'An unexpected error occurred! Please ' - 'inform your administrator.', 'error') + 'inform your admin.', 'error') def test_empty_replies_are_rejected(journalist_app, test_journo, test_source): @@ -772,7 +772,7 @@ def test_admin_resets_user_hotp_error(mocker, app.post(url_for('admin.reset_two_factor_hotp'), data=dict(uid=test_journo['id'], otp_secret=bad_secret)) ins.assert_message_flashed("An unexpected error occurred! " - "Please inform your administrator.", + "Please inform your admin.", "error") # Re-fetch journalist to get fresh DB instance @@ -872,7 +872,7 @@ def test_user_resets_user_hotp_error(mocker, data=dict(otp_secret=bad_secret)) ins.assert_message_flashed( "An unexpected error occurred! Please inform your " - "administrator.", "error") + "admin.", "error") # Re-fetch journalist to get fresh DB instance user = Journalist.query.get(test_journo['id']) @@ -1140,7 +1140,7 @@ def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): is_admin=None)) ins.assert_message_flashed( "An error occurred saving this user to the database." - " Please inform your administrator.", + " Please inform your admin.", "error") log_event = mocked_error_logger.call_args[0][0]
Use either "Admin" or "Administrator" but not both # Feature request ## Description The apps / docs should only use "admin" or "administrator" since it's possible these don't map nicely in other languages. Also for consistency.
My suggestion is to use the word *admin* throughout the docs, as the UI already has that word. May I submit a docs patch? :smiley:
Feel free! :smile:
#2497 handled the docs side, but I'm reopening this ticket for making changes in the application itself
@redshiftzero If the only immediate goal is to replace instances of "Administrator(s)/administrator(s)" with "Admin(s)/admin(s)" in the codebase, a command-line utility can be used, although it can be risky. Is this supposed to be a manual process or an automated one?
@aydwi it is definitely better to do it manually. A tool can be used, followed by manual verification, though.
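If a semi-automated approach is taken, one option is a throwaway script that only *lists* the remaining occurrences so each replacement can be reviewed by hand. This is a hypothetical helper, not part of the codebase; the root directory and file extensions are assumptions:

```python
import os
import re

PATTERN = re.compile(r"administrator", re.IGNORECASE)

def find_occurrences(root):
    """Print file:line for every occurrence, for manual review."""
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith((".py", ".html", ".rst", ".txt")):
                continue
            filepath = os.path.join(dirpath, name)
            with open(filepath) as f:
                for lineno, line in enumerate(f, start=1):
                    if PATTERN.search(line):
                        print("{}:{}: {}".format(
                            filepath, lineno, line.rstrip()))

if __name__ == "__main__":
    find_occurrences("securedrop")  # assumed checkout location
```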
2018-11-24T22:05:02Z
[]
[]
freedomofpress/securedrop
3,941
freedomofpress__securedrop-3941
[ "3876" ]
fe62e35cac70224d27b63fdf7f53c5be5befe716
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -16,7 +16,8 @@ from crypto_util import CryptoUtil from db import db from journalist_app import account, admin, api, main, col -from journalist_app.utils import get_source, logged_in +from journalist_app.utils import (get_source, logged_in, + JournalistInterfaceSessionInterface) from models import Journalist from store import Storage @@ -40,6 +41,7 @@ def create_app(config): app.config.from_object(config.JournalistInterfaceFlaskConfig) app.sdconfig = config + app.session_interface = JournalistInterfaceSessionInterface() csrf = CSRFProtect(app) Environment(app) diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -2,7 +2,7 @@ from datetime import datetime from flask import (g, flash, current_app, abort, send_file, redirect, url_for, - render_template, Markup) + render_template, Markup, sessions, request) from flask_babel import gettext, ngettext import hashlib from sqlalchemy.sql.expression import false @@ -337,3 +337,16 @@ def serve_file_with_etag(source, filename): response.headers['Etag'] = '"sha256:{}"'.format( hashlib.sha256(response.get_data()).hexdigest()) return response + + +class JournalistInterfaceSessionInterface( + sessions.SecureCookieSessionInterface): + """A custom session interface that skips storing sessions for api requests but + otherwise just uses the default behaviour.""" + def save_session(self, app, session, response): + # If this is an api request do not save the session + if request.path.split("/")[1] == "api": + return + else: + super(JournalistInterfaceSessionInterface, self).save_session( + app, session, response)
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1959,3 +1959,12 @@ def test_source_with_null_last_updated(journalist_app, test_journo['otp_secret']) resp = app.get(url_for('main.index')) assert resp.status_code == 200 + + +def test_does_set_cookie_headers(journalist_app, test_journo): + with journalist_app.test_client() as app: + response = app.get(url_for('main.login')) + + observed_headers = response.headers + assert 'Set-Cookie' in observed_headers.keys() + assert 'Cookie' in observed_headers['Vary'] diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -850,3 +850,13 @@ def test_set_reply_uuid(journalist_app, journalist_api_token, test_source): data=json.dumps(req_data), headers=get_api_headers(journalist_api_token)) assert resp.status_code == 400 + + +def test_api_does_not_set_cookie_headers(journalist_app, test_journo): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_endpoints')) + + observed_headers = response.headers + assert 'Set-Cookie' not in observed_headers.keys() + if 'Vary' in observed_headers.keys(): + assert 'Cookie' not in observed_headers['Vary']
Journalist API responses include Set-Cookie/Vary headers

## Description

The API includes the `Set-Cookie` header and `Vary: Cookie` when it should not, as they are not relevant to accessing the API.

## Steps to Reproduce

In one terminal: `make dev`. In another: `http HEAD localhost:8081/api/v1`

## Expected Behavior

```http
HTTP/1.0 200 OK
Content-Length: 198
Content-Type: application/json
Date: Mon, 15 Oct 2018 13:06:32 GMT
Server: Werkzeug/0.14.1 Python/2.7.6
```

## Actual Behavior

```http
HTTP/1.0 200 OK
Content-Length: 198
Content-Type: application/json
Date: Mon, 15 Oct 2018 13:06:32 GMT
Server: Werkzeug/0.14.1 Python/2.7.6
Set-Cookie: js=eyJleHBpcmVzIjp7IiBkIjoiTW9uLCAxNSBPY3QgMjAxOCAxNTowNjozMiBHTVQifX0.DqYiWA.-hH2qepnrLDYF8HV79vsn29TfhY; HttpOnly; Path=/
Vary: Cookie
```

## Comments

I have tried implementing an override of `SecureCookieSessionInterface` that doesn't set a cookie if the endpoint matches the API endpoint, but... no dice so far.
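The eventual fix (see the patch above) does exactly this: subclass Flask's `SecureCookieSessionInterface` and skip `save_session` for API paths. A self-contained sketch of the technique against a toy app, not the real journalist interface:

```python
from flask import Flask, jsonify, request, session, sessions

class SkipApiSessionInterface(sessions.SecureCookieSessionInterface):
    """Suppress session cookies for /api/... paths; default elsewhere."""

    def save_session(self, app, session, response):
        if request.path.split("/")[1] == "api":
            return  # nothing written, so no Set-Cookie / Vary: Cookie
        super(SkipApiSessionInterface, self).save_session(
            app, session, response)

app = Flask(__name__)
app.secret_key = "not-a-real-secret"  # placeholder
app.session_interface = SkipApiSessionInterface()

@app.route("/api/v1")
def api_root():
    session["seen"] = True  # would normally force a Set-Cookie header
    return jsonify({"ok": True})

if __name__ == "__main__":
    with app.test_client() as client:
        resp = client.get("/api/v1")
        assert "Set-Cookie" not in resp.headers
```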
@heartsucker are you still working on this, or can I give it a try? I'm not actively working on it, so if you'd like to give it a try that'd be very helpful! batman on it. I am not able to give it as much time as I thought I would. So, if anyone wants to take this, they are free to.
2018-11-24T22:06:49Z
[]
[]
freedomofpress/securedrop
3,958
freedomofpress__securedrop-3958
[ "3957" ]
1978a677e78cac09172068e74137cef5558da04e
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -3,7 +3,9 @@ from datetime import datetime, timedelta from flask import abort, Blueprint, current_app, jsonify, request from functools import wraps +from sqlalchemy.exc import IntegrityError from os import path +from uuid import UUID from werkzeug.exceptions import default_exceptions # type: ignore from db import db @@ -243,9 +245,27 @@ def all_source_replies(source_uuid): filename = path.basename(filename) reply = Reply(user, source, filename) - db.session.add(reply) - db.session.add(source) - db.session.commit() + + reply_uuid = data.get('uuid', None) + if reply_uuid is not None: + # check that is is parseable + try: + UUID(reply_uuid) + except ValueError: + abort(400, "'uuid' was not a valid UUID") + reply.uuid = reply_uuid + + try: + db.session.add(reply) + db.session.add(source) + db.session.commit() + except IntegrityError as e: + db.session.rollback() + if 'UNIQUE constraint failed: replies.uuid' in str(e): + abort(409, 'That UUID is already in use.') + else: + raise e + return jsonify({'message': 'Your reply has been stored', 'uuid': reply.uuid}), 201
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -2,9 +2,10 @@ import hashlib import json import os +import random from pyotp import TOTP -from uuid import UUID +from uuid import UUID, uuid4 from flask import current_app, url_for from itsdangerous import TimedJSONWebSignatureSerializer @@ -15,6 +16,8 @@ os.environ['SECUREDROP_ENV'] = 'test' # noqa from utils.api_helper import get_api_headers +random.seed('◔ ⌣ ◔') + def test_unauthenticated_user_gets_all_endpoints(journalist_app): with journalist_app.test_client() as app: @@ -792,3 +795,58 @@ def test_empty_json_20X(journalist_app, journalist_api_token, test_journo, headers=get_api_headers(journalist_api_token)) assert response.status_code in (200, 201) + + +def test_set_reply_uuid(journalist_app, journalist_api_token, test_source): + msg = '-----BEGIN PGP MESSAGE-----\nwat\n-----END PGP MESSAGE-----' + reply_uuid = str(uuid4()) + req_data = {'uuid': reply_uuid, 'reply': msg} + + with journalist_app.test_client() as app: + # first check that we can set a valid UUID + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 201 + assert resp.json['uuid'] == reply_uuid + + reply = Reply.query.filter_by(uuid=reply_uuid).one_or_none() + assert reply is not None + + len_of_replies = len(Source.query.get(test_source['id']).replies) + + # next check that requesting with the same UUID does not succeed + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 409 + + new_len_of_replies = len(Source.query.get(test_source['id']).replies) + + assert new_len_of_replies == len_of_replies + + # check setting null for the uuid field doesn't break + req_data['uuid'] = None + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 201 + + new_uuid = resp.json['uuid'] + reply = Reply.query.filter_by(uuid=new_uuid).one_or_none() + assert reply is not None + + # check setting invalid values for the uuid field doesn't break + req_data['uuid'] = 'not a uuid' + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 400
allow clients to set UUID for a reply ## Description Currently we return the UUID of a reply after the reply has been stored. In order for clients with spotty connectivity to be able to store a reply locally and then confirm it has been stored by the server, they need to have some sort of ID that is sent to the server and then returned. We might as well make this the UUID. ## User Stories As a developer working on the client, I want to be able to send a bunch of replies over a very slow / spotty connection and know which ones were stored and which ones failed.
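A hedged sketch of the client-side loop this enables, assuming the reply endpoint and `Token` auth header used elsewhere in the API, with `requests` as a stand-in HTTP client:

```python
import json
import uuid

import requests  # assumed third-party HTTP client

def send_reply_idempotently(api_url, token, source_uuid,
                            encrypted_reply, max_attempts=5):
    """Return the reply UUID once the server confirms storage."""
    reply_uuid = str(uuid.uuid4())  # generated and stored client-side
    payload = json.dumps({"uuid": reply_uuid, "reply": encrypted_reply})
    headers = {
        "Authorization": "Token {}".format(token),
        "Content-Type": "application/json",
    }
    url = "{}/sources/{}/replies".format(api_url, source_uuid)

    for _ in range(max_attempts):
        try:
            resp = requests.post(url, data=payload, headers=headers,
                                 timeout=10)
        except requests.RequestException:
            continue  # connection dropped; retry with the *same* UUID
        if resp.status_code == 201:
            return reply_uuid  # stored on this attempt
        if resp.status_code == 409:
            # Stored by an earlier attempt that looked like a failure
            # client-side.
            return reply_uuid
        resp.raise_for_status()
    raise RuntimeError("could not confirm reply delivery")
```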
2018-12-04T09:00:53Z
[]
[]
freedomofpress/securedrop
3,973
freedomofpress__securedrop-3973
[ "3972" ]
a06efb7403d4b66d26b986bee4b94a3dcbee1b22
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -84,7 +84,7 @@ def add_user(): else: flash(gettext("An error occurred saving this user" " to the database." - " Please inform your administrator."), + " Please inform your admin."), "error") current_app.logger.error("Adding user " "'{}' failed: {}".format( diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py --- a/securedrop/journalist_app/decorators.py +++ b/securedrop/journalist_app/decorators.py @@ -12,7 +12,7 @@ def admin_required(func): def wrapper(*args, **kwargs): if logged_in() and g.user.is_admin: return func(*args, **kwargs) - flash(gettext("Only administrators can access this page."), + flash(gettext("Only admins can access this page."), "notification") return redirect(url_for('main.index')) return wrapper diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -117,7 +117,7 @@ def reply(): except Exception as exc: flash(gettext( "An unexpected error occurred! Please " - "inform your administrator."), "error") + "inform your admin."), "error") # We take a cautious approach to logging here because we're dealing # with responses to sources. It's possible the exception message # could contain information we don't want to write to disk. diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -47,7 +47,7 @@ def commit_account_changes(user): except Exception as e: flash(gettext( "An unexpected error occurred! Please " - "inform your administrator."), "error") + "inform your admin."), "error") current_app.logger.error("Account changes for '{}' failed: {}" .format(user, e)) db.session.rollback() @@ -137,7 +137,7 @@ def validate_hotp_secret(user, otp_secret): else: flash(gettext( "An unexpected error occurred! " - "Please inform your administrator."), "error") + "Please inform your admin."), "error") current_app.logger.error( "set_hotp_secret '{}' (id {}) failed: {}".format( otp_secret, user.id, e)) diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -219,7 +219,7 @@ def _get_delete_confirmation(user): def delete_user(args): - """Deletes a journalist or administrator from the application.""" + """Deletes a journalist or admin from the application.""" with app_context(): username = _get_username_to_delete() try:
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -377,7 +377,7 @@ def _edit_account(self): # There's no field to change your username. with pytest.raises(NoSuchElementException): self.driver.find_element_by_css_selector('#username') - # There's no checkbox to change the administrator status of your + # There's no checkbox to change the admin status of your # account. with pytest.raises(NoSuchElementException): self.driver.find_element_by_css_selector('#is-admin') @@ -410,7 +410,7 @@ def _edit_user(self, username): # out with the user's username. username_field = self.driver.find_element_by_css_selector('#username') assert username_field.get_attribute('placeholder') == username - # There's a checkbox to change the administrator status of the user and + # There's a checkbox to change the admin status of the user and # it's already checked appropriately to reflect the current status of # our user. username_field = self.driver.find_element_by_css_selector('#is-admin') diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -120,7 +120,7 @@ def test_reply_error_flashed_message(journalist_app, test_journo, test_source): ins.assert_message_flashed( 'An unexpected error occurred! Please ' - 'inform your administrator.', 'error') + 'inform your admin.', 'error') def test_empty_replies_are_rejected(journalist_app, test_journo, test_source): @@ -772,7 +772,7 @@ def test_admin_resets_user_hotp_error(mocker, app.post(url_for('admin.reset_two_factor_hotp'), data=dict(uid=test_journo['id'], otp_secret=bad_secret)) ins.assert_message_flashed("An unexpected error occurred! " - "Please inform your administrator.", + "Please inform your admin.", "error") # Re-fetch journalist to get fresh DB instance @@ -872,7 +872,7 @@ def test_user_resets_user_hotp_error(mocker, data=dict(otp_secret=bad_secret)) ins.assert_message_flashed( "An unexpected error occurred! Please inform your " - "administrator.", "error") + "admin.", "error") # Re-fetch journalist to get fresh DB instance user = Journalist.query.get(test_journo['id']) @@ -1140,7 +1140,7 @@ def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): is_admin=None)) ins.assert_message_flashed( "An error occurred saving this user to the database." - " Please inform your administrator.", + " Please inform your admin.", "error") log_event = mocked_error_logger.call_args[0][0]
[0.11.0] update strings from between feature and string freeze

## Description

Due to the scenario described in #3971, we should update the release branch with the string changes made on `develop` between feature and string freeze. We already updated Weblate from `develop` and asked translators to translate these strings, so the action that minimizes disruption to translators is to include these string changes in the release. It is very low risk, since the only changes are string modifications with no updates to the actual functionality.

The implementor of this ticket should double-check that there are no other string changes in the `develop` branch, and this change should then be incorporated into the release branch:

- https://github.com/freedomofpress/securedrop/commit/803f13c7881f2b84d6222496331374779d9464eb
2018-12-05T18:29:39Z
[]
[]
freedomofpress/securedrop
3,994
freedomofpress__securedrop-3994
[ "3912" ]
800315537413aad3f8844dd749f04d40946d0d88
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -9,6 +9,7 @@ from random import SystemRandom from base64 import b32encode +from datetime import date from flask import current_app from gnupg._util import _is_stream, _make_binary_stream @@ -43,6 +44,15 @@ class CryptoUtil: GPG_KEY_TYPE = "RSA" DEFAULT_WORDS_IN_RANDOM_ID = 8 + # All reply keypairs will be "created" on the same day SecureDrop (then + # Strongbox) was publicly released for the first time. + # https://www.newyorker.com/news/news-desk/strongbox-and-aaron-swartz + DEFAULT_KEY_CREATION_DATE = date(2013, 5, 14) + + # '0' is the magic value that tells GPG's batch key generation not + # to set an expiration date. + DEFAULT_KEY_EXPIRATION_DATE = '0' + def __init__(self, scrypt_params, scrypt_id_pepper, @@ -170,7 +180,9 @@ def genkeypair(self, name, secret): key_type=self.GPG_KEY_TYPE, key_length=self.__gpg_key_length, passphrase=secret, - name_email=name + name_email=name, + creation_date=self.DEFAULT_KEY_CREATION_DATE.isoformat(), + expire_date=self.DEFAULT_KEY_EXPIRATION_DATE )) def delete_reply_keypair(self, source_filesystem_id):
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from datetime import datetime import io import os import pytest @@ -208,6 +209,59 @@ def test_genkeypair(source_app): assert source_app.crypto_util.getkey(filesystem_id) is not None +def parse_gpg_date_string(date_string): + """Parse a date string returned from `gpg --with-colons --list-keys` into a + datetime. + + The format of the date strings is complicated; see gnupg doc/DETAILS for a + full explanation. + + Key details: + - The creation date of the key is given in UTC. + - the date is usually printed in seconds since epoch, however, we are + migrating to an ISO 8601 format (e.g. "19660205T091500"). A simple + way to detect the new format is to scan for the 'T'. + """ + if 'T' in date_string: + dt = datetime.strptime(date_string, "%Y%m%dT%H%M%S") + else: + dt = datetime.utcfromtimestamp(int(date_string)) + return dt + + +def test_reply_keypair_creation_and_expiration_dates(source_app): + with source_app.app_context(): + codename = source_app.crypto_util.genrandomid() + filesystem_id = source_app.crypto_util.hash_codename(codename) + journalist_filename = source_app.crypto_util.display_id() + source = models.Source(filesystem_id, journalist_filename) + db.session.add(source) + db.session.commit() + source_app.crypto_util.genkeypair(source.filesystem_id, codename) + + # crypto_util.getkey only returns the fingerprint of the key. We need + # the full output of gpg.list_keys() to check the creation and + # expire dates. + # + # TODO: it might be generally useful to refactor crypto_util.getkey so + # it always returns the entire key dictionary instead of just the + # fingerprint (which is always easily extracted from the entire key + # dictionary). + new_key_fingerprint = source_app.crypto_util.getkey(filesystem_id) + new_key = [key for key in source_app.crypto_util.gpg.list_keys() + if new_key_fingerprint == key['fingerprint']][0] + + # All keys should share the same creation date to avoid leaking + # information about when sources first created accounts. + creation_date = parse_gpg_date_string(new_key['date']) + assert (creation_date.date() == + CryptoUtil.DEFAULT_KEY_CREATION_DATE) + + # Reply keypairs should not expire + expire_date = new_key['expires'] + assert expire_date == '' + + def test_delete_reply_keypair(source_app, test_source): fid = test_source['filesystem_id'] source_app.crypto_util.delete_reply_keypair(fid)
Expiration date on PGP key leaks date of source's first contact ## Description When we autogen PGP keys, the expiration is set to 1 year. This can be used to infer the date a source first used SD. We may want to remove this expiration field altogether to get around this issue.
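The patch above addresses both halves of the metadata leak: every reply key gets a fixed creation date and no expiration. A sketch of the underlying key-generation call; this assumes the `python-gnupg` fork used by `crypto_util` (whose constructor takes `homedir`), and the key length, passphrase, and email are placeholders:

```python
from datetime import date

import gnupg  # assumed: the python-gnupg fork used by crypto_util

gpg = gnupg.GPG(homedir="/tmp/sd-keyring-demo")  # throwaway keyring

# One fixed date shared by all reply keys, so key metadata no longer
# reveals when an individual source first contacted the instance.
KEY_CREATION_DATE = date(2013, 5, 14)

key_input = gpg.gen_key_input(
    key_type="RSA",
    key_length=4096,                      # placeholder length
    passphrase="placeholder-passphrase",  # placeholder
    name_email="placeholder-identifier",  # placeholder
    creation_date=KEY_CREATION_DATE.isoformat(),
    expire_date="0",  # GPG batch-mode magic value: never expire
)
key = gpg.gen_key(key_input)
print(key.fingerprint)
```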
Mind if I take this one? :) Note that GPG keys also embed a [Creation Date](https://www.gnupg.org/documentation/manuals/gnupg-devel/Unattended-GPG-key-generation.html), which defaults to the current system time at the moment the key is generated; however, like the expiration date, it is also customizable.
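Whether a given key actually embeds those values can be checked from the key listing; the date parsing below mirrors the helper added in the tests, since gnupg emits either epoch seconds or an ISO 8601 string:

```python
from datetime import datetime

def parse_gpg_date_string(date_string):
    """Parse a creation date from `gpg --with-colons --list-keys`."""
    # Newer gnupg prints ISO 8601 (e.g. 19660205T091500); older
    # versions print seconds since the epoch.
    if "T" in date_string:
        return datetime.strptime(date_string, "%Y%m%dT%H%M%S")
    return datetime.utcfromtimestamp(int(date_string))

def key_dates(gpg, fingerprint):
    """Return (creation datetime, expires field) for one key."""
    key = [k for k in gpg.list_keys()
           if k["fingerprint"] == fingerprint][0]
    # An empty 'expires' string means the key never expires.
    return parse_gpg_date_string(key["date"]), key["expires"]
```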
2018-12-17T05:33:44Z
[]
[]
freedomofpress/securedrop
4,009
freedomofpress__securedrop-4009
[ "4005" ]
7f566f07137848d704186e055871c4a85a60425c
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -205,7 +205,10 @@ def getkey(self, name): def export_pubkey(self, name): fingerprint = self.getkey(name) - return self.gpg.export_keys(fingerprint) + if fingerprint: + return self.gpg.export_keys(fingerprint) + else: + return None def encrypt(self, plaintext, fingerprints, output=None): # Verify the output path
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -278,3 +278,19 @@ def test_delete_reply_keypair_no_key(source_app): def test_getkey(source_app, test_source): assert (source_app.crypto_util.getkey(test_source['filesystem_id']) is not None) + + # check that a non-existent key returns None + assert source_app.crypto_util.getkey('x' * 50) is None + + +def test_export_pubkey(source_app, test_source): + begin_pgp = '-----BEGIN PGP PUBLIC KEY BLOCK----' + + # check that a filesystem_id exports the pubkey + exported = source_app.crypto_util.export_pubkey( + test_source['filesystem_id']) + assert exported.startswith(begin_pgp) + + # check that a non-existent identifer exports None + exported = source_app.crypto_util.export_pubkey('x' * 50) + assert exported is None
using the API to retrieve a source immediately after source creation returns *all* source keys ## Description ## Steps to Reproduce ### Steps to reproduce - boot SecureDrop dev env (server) - retrieve API token as journalist - login to web as source - as source, submit message to web - as journalist, immediately (important! as in < 2 seconds) go `GET /api/v1/sources` ## Expected Behavior Exactly one or zero keys are returned for the new source. ## Actual Behavior All keys are returned for the new source. Example: ```json { "sources": [ { "add_star_url": "/api/v1/sources/d55335b0-5d7a-4151-bc3c-212f344e584a/add_star", "interaction_count": 4, "is_flagged": false, "is_starred": false, "journalist_designation": "rallying zoologist", "key": { "public": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\nmQINBFGRfoABEADJ56nCxMuQeyp/LMhzEyRCQZ6UrYS3KVHtOPz7Q+N/4rWsS+Wu\nMO8X606xVA43JFVJgqeD+a4pcknmw9k0O7WzD+peswLCKulQ0s+yaZjP0G08CQH8\nS3nwG/zoCBxX+FXNnThtN+2u+XhGcKNACzWaiH8KiO9YUSUmiexCP3REBgMLucFF\nTnQK6RUcuVmYYS+VS87hYrEN4F6ANw5EeuENj++a36AJNFrzPO+QAfqZfe8UY64w\niyoXaELKCPKUMq3vka8oR6wUwPGw+053Epmd8BEy/7Ol6BdNbthB0bSrgMtnfEjX\nlU1LuusSPXOTAIXHr63+45oxAJ3MO8qoNXznWbPwElQAR/2xrsy2z20TDuvHCS2f\nrbITmB6oN76505YDFPjc19dXKWUurapQHsTkG1Bv/P/31/g46LA9TrULu+ZN7Dxz\nvvxDEvpPnrjKMjeew21v2w1P29DBhpoZwaxUhpMC7SF4yiEotaUXiZkaxCNkxT/l\n1UPQkS6XhuuGhsEv81BOuibPsI388nRNx5OUaCnXH9FxaFbyJyfmDek9IaymEXjV\nfD9gr1fePG0Wsa94D29nQ/GCegw4UhbDCZD6WX/Fi4kK8SBOOg0fU/5cW0eSZ8bp\nfTEXM++T0mKpbD+pz0QQMJHJuo46hZ7nWGslt7taCmqIPHI0KNNCKYPAfwARAQAB\ntHxBdXRvZ2VuZXJhdGVkIEtleSA8RkdWTUJPWFc2UVBSTDRJMlpBWjZUWktEV0Iy\nUEFNNjY3UE5HQ0tTRVROSEdBR1RVS1NHT1FNT0xJTFVTSk5YR0RZNUZPUjRJQUxP\nQ01TUjJXWE5GWlFKVFU2R01HVkpQR09SR0RQST0+iQI5BBMBCgAjBQJRkX6AAhsv\nBwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQPaif/ttqPFxozg/+LgcbcStF\nBRxSzui0gZI2OEIdhL2ELZRSflhjOx650AfJO1hFZw3gHKrCv2AYwOXRqWhZHpwZ\n67qtkOTbwP2ENs2JNJodzk3CLRXm4YgoCABbH0QFd85osareTKA8qt1dIfzFwHup\nrJRCoc0cOI0vtzyK1fyWxAriRkn+Df2ue4FWJRI3LfvvCccr0+xkfac9RkgAUVIl\n46NyplnfRCwiHH0UkAzXYxOCSZ7NbT63HImZ8pkQifReFgANwljjtMpuNKw33otp\n7FMBK/VyeBcsHj/KhotMLbdmJvQ3Jwur5zT9/jR1F6Bh+JTHk8+sQZtefA9DTpFI\n6mBQl56toCqWMWfXB58tlv/YYb7GPhJI3dhc6UBJ8AHYA3Sgv698hITmAM4Yy+RB\nrGfAVKPpCwKr4wNh58BtHQ4J0oT4cLEgOr03QbnLIfg8EFBkIEfA+GP4/kwVrdAQ\nxqoRaksbWHXRUbXfaoraglYszAB9n004J5B0g0JwSgRRK0RpSrFLgW50bfCX6Nqe\nYTrxVL2tUXlZD8nKKu/tbmVymEgZameAVpR4sSU++9uBU3tsvqqqSR7SAKZDDz2k\nX8ofyvvqPeFXPQA4wyVcP9STgPUfYrn7tlSz6oq/fJHJcSw4TgY/zt7UZZxm0fJQ\ngN8384mxsBfcHYyblx4kMPnjwQ/B5067IuA=\n=3F/H\n-----END PGP PUBLIC KEY BLOCK-----\n", "type": "PGP" }, "last_updated": "2018-12-22T13:00:54.524845Z", "number_of_documents": 0, "number_of_messages": 2, "remove_star_url": "/api/v1/sources/d55335b0-5d7a-4151-bc3c-212f344e584a/remove_star", "replies_url": "/api/v1/sources/d55335b0-5d7a-4151-bc3c-212f344e584a/replies", "submissions_url": "/api/v1/sources/d55335b0-5d7a-4151-bc3c-212f344e584a/submissions", "url": "/api/v1/sources/d55335b0-5d7a-4151-bc3c-212f344e584a", "uuid": "d55335b0-5d7a-4151-bc3c-212f344e584a" }, { "add_star_url": "/api/v1/sources/dc17df20-6459-4317-8b3c-67b51997d3ce/add_star", "interaction_count": 1, "is_flagged": false, "is_starred": false, "journalist_designation": "atomistic leviathan", "key": { "public": "-----BEGIN PGP PUBLIC KEY 
BLOCK-----\n\nmQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t\nwSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU\n+8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM\nKBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB\n0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg\nu3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2\nak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B\n5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf\nCyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7\nhV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf\nyfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB\ntDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP\nRFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS\nm8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r\nFyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/\nJzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r\nq4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn\n5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg\nb1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi\nAsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW\niNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3\ncm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai\nR78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj\nEqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj\nLnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6\n+SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC\nvfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq\n95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf\nT9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls\n9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC\nKDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p\nN4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21\nOcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h\nh4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls\nVr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f\nFwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA\n6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3\ninyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA\nKmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn\nIW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f\n4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK\nn1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ\nBSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF\nz2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI\nk8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS\niphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ\nVu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8mQINBFGRfoABEADJ56nCxMuQeyp/LMhz\nEyRCQZ6UrYS3KVHtOPz7Q+N/4rWsS+WuMO8X606xVA43JFVJgqeD+a4pcknmw9k0\nO7WzD+peswLCKulQ0s+yaZjP0G08CQH8S3nwG/zoCBxX+FXNnThtN+2u+XhGcKNA\nCzWaiH8KiO9YUSUmiexCP3REBgMLucFFTnQK6RUcuVmYYS+VS87hYrEN4F6ANw5E\neuENj++a36AJNFrzPO+QAfqZfe8UY64wiyoXaELKCPKUMq3vka8oR6wUwPGw+053\nEpmd8BEy/7Ol6BdNbthB0bSrgMtnfEjXlU1LuusSPXOTAIXHr63+45oxAJ3MO8qo\nNXznWbPwElQAR/2xrsy2z20TDuvHCS2frbITmB6oN76505YDFPjc19dXKWUurapQ\nHsTkG1Bv/P/31/g46LA9TrULu+ZN7DxzvvxDEvpPnrj
KMjeew21v2w1P29DBhpoZ\nwaxUhpMC7SF4yiEotaUXiZkaxCNkxT/l1UPQkS6XhuuGhsEv81BOuibPsI388nRN\nx5OUaCnXH9FxaFbyJyfmDek9IaymEXjVfD9gr1fePG0Wsa94D29nQ/GCegw4UhbD\nCZD6WX/Fi4kK8SBOOg0fU/5cW0eSZ8bpfTEXM++T0mKpbD+pz0QQMJHJuo46hZ7n\nWGslt7taCmqIPHI0KNNCKYPAfwARAQABtHxBdXRvZ2VuZXJhdGVkIEtleSA8RkdW\nTUJPWFc2UVBSTDRJMlpBWjZUWktEV0IyUEFNNjY3UE5HQ0tTRVROSEdBR1RVS1NH\nT1FNT0xJTFVTSk5YR0RZNUZPUjRJQUxPQ01TUjJXWE5GWlFKVFU2R01HVkpQR09S\nR0RQST0+iQI5BBMBCgAjBQJRkX6AAhsvBwsJCAcDAgEGFQgCCQoLBBYCAwECHgEC\nF4AACgkQPaif/ttqPFxozg/+LgcbcStFBRxSzui0gZI2OEIdhL2ELZRSflhjOx65\n0AfJO1hFZw3gHKrCv2AYwOXRqWhZHpwZ67qtkOTbwP2ENs2JNJodzk3CLRXm4Ygo\nCABbH0QFd85osareTKA8qt1dIfzFwHuprJRCoc0cOI0vtzyK1fyWxAriRkn+Df2u\ne4FWJRI3LfvvCccr0+xkfac9RkgAUVIl46NyplnfRCwiHH0UkAzXYxOCSZ7NbT63\nHImZ8pkQifReFgANwljjtMpuNKw33otp7FMBK/VyeBcsHj/KhotMLbdmJvQ3Jwur\n5zT9/jR1F6Bh+JTHk8+sQZtefA9DTpFI6mBQl56toCqWMWfXB58tlv/YYb7GPhJI\n3dhc6UBJ8AHYA3Sgv698hITmAM4Yy+RBrGfAVKPpCwKr4wNh58BtHQ4J0oT4cLEg\nOr03QbnLIfg8EFBkIEfA+GP4/kwVrdAQxqoRaksbWHXRUbXfaoraglYszAB9n004\nJ5B0g0JwSgRRK0RpSrFLgW50bfCX6NqeYTrxVL2tUXlZD8nKKu/tbmVymEgZameA\nVpR4sSU++9uBU3tsvqqqSR7SAKZDDz2kX8ofyvvqPeFXPQA4wyVcP9STgPUfYrn7\ntlSz6oq/fJHJcSw4TgY/zt7UZZxm0fJQgN8384mxsBfcHYyblx4kMPnjwQ/B5067\nIuA=\n=5Nq3\n-----END PGP PUBLIC KEY BLOCK-----\n", "type": "PGP" }, "last_updated": "2018-12-22T13:14:02.588646Z", "number_of_documents": 0, "number_of_messages": 1, "remove_star_url": "/api/v1/sources/dc17df20-6459-4317-8b3c-67b51997d3ce/remove_star", "replies_url": "/api/v1/sources/dc17df20-6459-4317-8b3c-67b51997d3ce/replies", "submissions_url": "/api/v1/sources/dc17df20-6459-4317-8b3c-67b51997d3ce/submissions", "url": "/api/v1/sources/dc17df20-6459-4317-8b3c-67b51997d3ce", "uuid": "dc17df20-6459-4317-8b3c-67b51997d3ce" } ] } ``` If you import the above key into GPG, you will see that it contains: - Journalist's pub key - First source's pub key - **Not** the second source's pub key ## Comments The root cause is here: ```python def export_pubkey(self, name): fingerprint = self.getkey(name) return self.gpg.export_keys(fingerprint) ``` if `None` is passed to `export_keys`, then it will return *all* pub keys. This bug will cause the following fatal error in the Qt client. ``` Traceback (most recent call last): File "/home/heartsucker/code/freedomofpress/securedrop-client/securedrop_client/logic.py", line 189, in <lambda> lambda: self.completed_api_call(new_thread_id, callback)) File "/home/heartsucker/code/freedomofpress/securedrop-client/securedrop_client/logic.py", line 242, in completed_api_call user_callback(result_data) File "/home/heartsucker/code/freedomofpress/securedrop-client/securedrop_client/logic.py", line 419, in on_synced self.gpg.import_key(source.uuid, pub_key) File "/home/heartsucker/code/freedomofpress/securedrop-client/securedrop_client/crypto.py", line 116, in import_key raise RuntimeError('Expected exactly one fingerprint. Found: {}' RuntimeError: Expected exactly one fingerprint. ``` In additional to the error in Qt, it seems likely this will cause errors in the web UI too (both interfaces), but I haven't tested that yet.
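The patch above guards against exactly this failure mode: `export_pubkey` now returns `None` when no key is found, instead of handing `None` to `gpg.export_keys`. The shape of the guard, as a standalone sketch:

```python
def export_pubkey_safely(gpg, fingerprint):
    """Export one ASCII-armored public key, or None if there is none.

    Passing a falsy identifier to GPG.export_keys makes the wrapper
    export *every* public key in the keyring -- the bug described
    above -- so refuse to call it without a fingerprint.
    """
    if not fingerprint:
        return None
    return gpg.export_keys(fingerprint)
```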
There is a related bit of code in `crypto_util` that makes that may be worth resolving along side this. ```python def getkey(self, name): for key in self.gpg.list_keys(): for uid in key['uids']: if name in uid: return key['fingerprint'] return None ``` If we call this like `app.crypto_util.getekey('a')`, the letter `a` will match many keys but only the first will be returned. This will most likely lead to us using the wrong key. Instead of listing all the keys and iterating through them, it may be better to use the GPG wrapper to extract exactly the key we want, then check that the extraction returned only one key. I'm fairly sure the wrapper supports this. If we do not make the change above, when we call `export_pubkey`, we may be able to just directly use the `filesystem_id` since this is unique enough to be able to extract the pub key. It also would avoid the error originally reported in the ticket. The new function would look like: ```python def export_pubkey(self, identifier): # 40 is the len of a fingerprint, scrypt filesystem_ids are longer than that if not identifier and not len(identifier) > 40: raise Exception("not today!") return self.gpg.export_keys(identifier) As a related issue, if we use the code snippet above of exporting by identifier, this happens ``` def test_export_pubkey(source_app, test_source): export = source_app.crypto_util.export_pubkey begin_pgp = '-----BEGIN PGP PUBLIC KEY BLOCK----' # check that a filesystem_id exports the pubkey exported_by_fid = export(test_source['filesystem_id']) assert exported_by_fid.startswith(begin_pgp) # check that a fingerprint exports the pubkey test_source_fpr = source_app.crypto_util.getkey( test_source['filesystem_id']) assert test_source_fpr # precondition exported_by_fpr = export(test_source_fpr) assert exported_by_fid.startswith(begin_pgp) # both exports should be equal > assert exported_by_fid == exported_by_fpr E AssertionError: assert '-----BEGIN P...Y BLOCK-----\n' == '-----BEGIN PG...Y BLOCK-----\n' E -----BEGIN PGP PUBLIC KEY BLOCK----- E E + mI0EUZF+gAEEANB0+sUg2oIC1gTupLac8JDAn89aEwZ1wdUoa6S1iXl0/G9Ez8Gk E + fwKHtREnYi9O2HzjKTnZmMTa2HOwrX5H+P4OhAhUIC5iWmA8UfBTg8iFtGK9OXy/ E + PEcEHRtTvNOn/b+tqlMiyWGv3AGpW4jaWbMmjKL6AEYcwUP2j4WayxlzABEBAAG0 E + fEF1dG9nZW5lcmF0ZWQgS2V5IDw0QzdESDNJRFBBUk9ONTVVWTJRVldCVkRZQ1hV E + RktHUVhWRURHTVZIM09WSE40RllSWlJRV0NaNDY3RlEyS0pBQ1pUWVBFRVRSN1dN E + R1ozQzRMWllQQUVJUzVMMlE2TlRXV0syRDVZPT6IuQQTAQoAIwUCUZF+gAIbLwcL E + CQgHAwIBBhUIAgkKCwQWAgMBAh4BAheAAAoJEHpVhzvUWhgO5OUEAL5+s1cPSDFB E + jq4Huk6SuKtS0BRY0vfKbPiyRUlKibxu+Gc/7ACBoeVq74SadxXhJEUeF/rD99qY E + 6jP933gHG2XYdaYOab/H0aHynaEqGH1EXgqv8dSpKcRqBzka1KkaWUhVQJIx2TVz E + fCZFbuMKbnSLiJgg9B1gbLNQrk5/lhxx E + =pQMj E - mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t E - wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU E - +8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM E - KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB E - 0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg E - u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2 E - ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B E - 5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf E - CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7 E - hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf E - yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB E - tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP E - 
RFVDVElPTimJAjgEEwECACIFAlKbxQMCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4B E - AheAAAoJEMxA7xIoJxRBF4QP/i4aTU4ZIkeKnYSyAA+vAvovdpP0Uwq355AIuiPk E - N2gfe1jSZcYw//Qz+girZj6roERdU0Rg7LZDNOec+qf3rm2o39JkXUI278hibEIm E - PtNImL8y5a2Dsh5tHwq9YsomNbg9TnJuYHaHPI7eNMTNdjePxuhwZ/HdDwHqi38x E - 0ryyzkmstIg+WBAeHJeWh7gJNOTRqVBu23woSnSQxeaqOvProCyh/CPNx7M6fU9J E - zRLi1XvQ6QgRql68FyvtydhC/eBZJekrjNk2D01nl0tHveDrgu/4aorRQGSy54x8 E - QSbH+4MF3v+7GbPuNYVvcP0wEsKMAxD6zwqHxslD8c8yMiHxP3Lx6T90jkiXt6EV E - pyLXLNP6og+3tICuPergpHmUI14grsG7wq67ZWB8JGyR7Y8kBCVq67uM7ddlwtK6 E - KHBN0sux75Yh1bqaLJzMk0C3WkeScex1cA16AmHt6gDL6CEWiTuRmlgxNH+LL3kS E - o3EC3qUuK9h7LNS18+MeMOmo9S2tlEaKmqYXAhxOfkT8eiSRWJRKLPHRJ3YvIna2 E - jHpr5rOWt5biFFjlZU9yVV5PKe8C0ytKXUMQStGhUr8BkqcGBQR4MJ32D8SJHM4u E - tflrNv2cKIWUsx5ZZWZt5++cJetcmpVA2tVl16VJlqQmkn+hCkrK//mdT2lyB5Wv E - M/rxiQI7BBMBAgAlAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAUCUpvFMwIZ E - AQAKCRDMQO8SKCcUQdYQD/41aBUYtUd4uD9xIZmmIPVUFKOUkcnzA6BOqxcszr9s E - d70ajzNQZuIoS0Hj38CrCSw6O1FYI5kzRGOG6UmJirrP7qOisesq8Zunfyc2NARE E - fiyYtIgNIsi06mJN3l8sUJh5supsvM1cmoOZP3/w3o0lEIHJCWfaQIa+66uGkwlm E - GqaP+nwzNFwKNU+VKbf4kqh3rejWUdeSQgxyLxiWBgMVXpnHEjShW9bbJ+WiDLhg E - 2zYrwlCBWr/nqNiSBMutAaJHGQsuvKlsLZTuZyGqO/TKDTDc9hBUjqJjoG9bDWnR E - I9rwJC31OPnTlW2LBRlMzmldJlB3GkzT2MJyvj4MjouTNOQV+eWvRUgYIgLBMBJw E - ytZb8y31hEMVUL7MPQPb5wm0l/D8AuP7lLaJJCsVTfSApfAH2k9/PjlB1ojZeDQo E - HduokYEy4XzF6U40cjm9kbxXcywcgxFiG77uZAoduO1BWEi9JZZPrLjYd3JtJjlH E - S9Z2bhKOBMhBdXavtaKTHwzBffKtOutMGXcL8FECFwbf8itxR9ARgOumoke/GeKR E - 9SarWL83wycflz/DtjfRmBne5KWpP+QtgUycy1cs2oAdcovEpomJ2uckIxKluU8X E - qtBhndh+CrVBE1Hr1OMdEcN5bh06Gg3fXebIqBePKPOz5RsJPjgqHz3y4y58BTzW E - XLkCDQRSWYtgARAA837/vToG+ChFhaJvczBfsYPG3Hfwre9v7Fi0Fuj8+vkpJixB E - 7pJUzvpO8YkOo3c1849a038t9ey+xudZ2gUm+hJH7/JrtqIDsK77YGJxgr3wqaKF E - EsXH4vmhCcyCS9vUwItUQi2ZteSkW5LxJfMEvdwUi4moOcOP/Hj9b13m6veRqwmc E - IjWXYXULN6p+I91Ub01v0mRyAHSWPpjH1DD46uHOLAPNqLOpFaxJ1nixn0/XfpJ3 E - 5vSf9kbpsvdGywGOkhZkWffw8cCsGyLFcvAkb1N0VRUl/BwgUHqUQkJJbPa+ylQa E - mBNloftGvvcBxxzSO1QcShlz35a4q8WNQAeb4y3F9YZl2wqMn+MrYHR8gig8/TnP E - sZICXslVul8EqnORIbjRV6d/guwRe3kGdURCS2y+grRJHhdIxwWk3ijP6TeH1YYz E - 4lPxbDZmRiscS8sQ55wyOaWPG4aYVccAUWeRrVTaolTQ8Pq0QAkGpaU9tTnAICz/ E - kc/qn90z8hGTeljxMfP++iC7kh2/JqTh+1v+deH+TbhWgJYJlJzt3E9dIYeCMDkP E - pL49KjMMPHwEiPQyRMV1GG98Q0gpjpCT4btfw6694HRQYWuP4wM+4wVpbFa9kSyc E - 4pX/DIvY/FqRyHz/ll7cFs9/omD0tEj6Ae4PQwNPhIu+tKSSX+9wBIw/nxcAEQEA E - AYkCHwQYAQIACQUCUlmLYAIbDAAKCRDMQO8SKCcUQaf3D/95Scfc+cs2QOlLy0Ee E - qSvldYstob2fkvhMGEQok4GE13pB8y11sI1hn1GA0xzHmkvTvQTETBjlN4p8m+Ox E - mUqENDdG0VU862Ov9In4VEidNDHpjqHSNW8+KQDJN7kfIlsoZWhp+yQCACpgpvit E - KpHiXetUvYsq8e7PUJmjT57qZuaXPm6TgJGysqu7BBaVjOUMfNp9bVTLZyFtjqET E - 28/NCZJEAdqZeko7JF6mfPY3BVhiOiAoWBDSnOucWnw7eLhbh1PuY+I9n+MkmEsH E - 6FgvezDoCS6gP1opsXnjQmBYcpFA1qy+ZigU4DF4B/7GJNxg6O37LGmGyp9cW21+ E - Bl0Jd/q7uM0+vxewsWRK4y2/nr0ocn+IrTPuQHWykYkc6K2qim5QWuE3yQUqh4rH E - XMXWXgXtffo8mOmvGTHBoa0HpLIisU0Ye8pQJOwi1YCxCBy8V8x6GdKABc9rpRvs E - x/SkvT3vDEMRwq5cn6EBJULiaGG62ATrtvIrbxj4M+vucYL2UTrz6KiJCJPA2LfZ E - SalBQXmbhSXeVtU7hyi8bnlWL0inhvnt04zmHSRbw2SJzOHDVq/DfdpmEoqYZY4h E - wClU0HiH9ZmtO0FB/7pd2AvoNOxfgS6JyCFwOrkFXm7+Yi8U4DsyEMB0yVbuDD4k E - DZ9YeHZNUaJ31f2ifnXtfqowPJiNBFGRfoABBADQdPrFINqCAtYE7qS2nPCQwJ/P E - WhMGdcHVKGuktYl5dPxvRM/BpH8Ch7URJ2IvTth84yk52ZjE2thzsK1+R/j+DoQI E - VCAuYlpgPFHwU4PIhbRivTl8vzxHBB0bU7zTp/2/rapTIslhr9wBqVuI2lmzJoyi E - +gBGHMFD9o+FmssZcwARAQABtHxBdXRvZ2VuZXJhdGVkIEtleSA8NEM3REgzSURQ E - QVJPTjU1VVkyUVZXQlZEWUNYVUZLR1FYVkVER01WSDNPVkhONEZZUlpSUVdDWjQ2 E - 
N0ZRMktKQUNaVFlQRUVUUjdXTUdaM0M0TFpZUEFFSVM1TDJRNk5UV1dLMkQ1WT0+ E - iLkEEwEKACMFAlGRfoACGy8HCwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRB6 E - VYc71FoYDuTlBAC+frNXD0gxQY6uB7pOkrirUtAUWNL3ymz4skVJSom8bvhnP+wA E - gaHlau+EmncV4SRFHhf6w/famOoz/d94Bxtl2HWmDmm/x9Gh8p2hKhh9RF4Kr/HU E - qSnEagc5GtSpGllIVUCSMdk1c3wmRW7jCm50i4iYIPQdYGyzUK5Of5YccQ== E - =4OIG E -----END PGP PUBLIC KEY BLOCK----- tests/test_crypto_util.py:299: AssertionError ``` You can see in the above that I'm exporting once by `filesystem_id` and then once by `fingerprint` and then compare. They are unexpectedly not equal. @redshiftzero mentioned that this may be an upstream issue, but also we're far behind the latest version of `python-gnupg` (see #3622) so :man_shrugging: Hmm, did you confirm that it is indeed the case that we should expect to be able to pass `filesystem_id` to `GPG.export_keys` instead of a fingerprint? Asking because [the docstring for that method](https://github.com/isislovecruft/python-gnupg/blob/2.3.1/gnupg/gnupg.py#L425) says that keyids or fingerprints are expected as args to `GPG.export_keys` When I tested just now, it looks like the behavior when the identifier actually doesn't match anything is to still return a public-key looking return value (i.e. the underlying problem here could just be passing an unexpected arg to `GPG.export_keys`): ``` (Pdb) source_app.crypto_util.gpg.export_keys('this definitely should not return anything') u'-----BEGIN PGP PUBLIC KEY BLOCK-----\n\nmQINBFJZi2ABEACZ ... [i snippity snipped some radix64 encoded stuff] ... RM3C5eMH/6s0g6shvlCb+DOCA==\n=IE/M\n-----END PGP PUBLIC KEY BLOCK-----\n' ``` Yeah I just woke up and checked that with `pgpdump` from the command line and this is def related to the lib. lol unexpected computer behavior. I guess we'll have to just traverse the whole list each time.
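Following the discussion above, a stricter `getkey` could also fail loudly on ambiguous substring matches rather than silently returning the first hit. A sketch, assuming the same `self.gpg.list_keys()` interface used in the snippet earlier:

```python
def getkey(self, name):
    matches = set(
        key['fingerprint']
        for key in self.gpg.list_keys()
        for uid in key['uids']
        if name in uid
    )
    if len(matches) > 1:
        raise RuntimeError(
            'Expected at most one key for {!r}, found {}'.format(
                name, len(matches)))
    return matches.pop() if matches else None
```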
2019-01-02T12:57:08Z
[]
[]
freedomofpress/securedrop
4,023
freedomofpress__securedrop-4023
[ "3977" ]
a666138252872897625e82db085899eafc17beca
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -309,7 +309,7 @@ def get_current_user(): user = get_user_object(request) return jsonify(user.to_json()), 200 - def _handle_http_exception(error): + def _handle_api_http_exception(error): # Workaround for no blueprint-level 404/5 error handlers, see: # https://github.com/pallets/flask/issues/503#issuecomment-71383286 response = jsonify({'error': error.name, @@ -318,6 +318,6 @@ def _handle_http_exception(error): return response, error.code for code in default_exceptions: - api.errorhandler(code)(_handle_http_exception) + api.errorhandler(code)(_handle_api_http_exception) return api
diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -16,7 +16,6 @@ "Header set X-Content-Security-Policy: \"default-src 'self'\"", "Header set Content-Security-Policy: \"default-src 'self'\"", 'Header set Referrer-Policy "no-referrer"', - 'Header unset Etag', ] @@ -51,11 +50,11 @@ def test_apache_headers_journalist_interface(host, header): <Directory /var/www/> Options None AllowOverride None - <Limit GET POST HEAD> + <Limit GET POST HEAD DELETE> Order allow,deny allow from {apache_allow_from} </Limit> - <LimitExcept GET POST HEAD> + <LimitExcept GET POST HEAD DELETE> Order deny,allow Deny from all </LimitExcept> @@ -64,11 +63,11 @@ def test_apache_headers_journalist_interface(host, header): <Directory {securedrop_code}> Options None AllowOverride None - <Limit GET POST HEAD> + <Limit GET POST HEAD DELETE> Order allow,deny allow from {apache_allow_from} </Limit> - <LimitExcept GET POST HEAD> + <LimitExcept GET POST HEAD DELETE> Order deny,allow Deny from all </LimitExcept> diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -2024,3 +2024,9 @@ def test_does_set_cookie_headers(journalist_app, test_journo): observed_headers = response.headers assert 'Set-Cookie' in observed_headers.keys() assert 'Cookie' in observed_headers['Vary'] + + +def test_app_error_handlers_defined(journalist_app): + for status_code in [400, 401, 403, 404, 500]: + # This will raise KeyError if an app-wide error handler is not defined + assert journalist_app.error_handler_spec[None][status_code] diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -244,7 +244,16 @@ def test_user_without_token_cannot_post_protected_endpoints(journalist_app, assert response.status_code == 403 -def test_api_404(journalist_app, journalist_api_token): +def test_api_error_handlers_defined(journalist_app): + """Ensure the expected error handler is defined in the API blueprint""" + for status_code in [400, 401, 403, 404, 500]: + result = journalist_app.error_handler_spec['api'][status_code] + + expected_error_handler = '_handle_api_http_exception' + assert result.values()[0].__name__ == expected_error_handler + + +def test_api_error_handler_404(journalist_app, journalist_api_token): with journalist_app.test_client() as app: response = app.get('/api/v1/invalidendpoint', headers=get_api_headers(journalist_api_token))
DELETE methods not allowed by apache journalist interface config

## Description

This is the issue causing https://github.com/freedomofpress/securedrop-client/issues/173

## Steps to Reproduce

1. Provision staging VMs
2. Get a valid API token [[ref]](https://gist.github.com/redshiftzero/934093c4f8e0f47c53e1add475345e34)
3. Add a source via the source interface
4. Attempt to remove a star from a source (this is an idempotent operation, so one should not need to first add the star to hit the endpoint):

```
curl -X DELETE -H "Content-Type: application/json" -H "Authorization: Token mytokenhere" --proxy socks5h://127.0.0.1:9150 mystagingserver.onion/api/v1/sources/my-source-uuid-here/remove_star
```

## Expected Behavior

`{"message":"Star removed"}`

## Actual Behavior

A 403 occurs, and we get redirected to the login page. This redirect-for-403 behavior only happens for the API in staging because Apache is [sending 403s to `/notfound`](https://github.com/freedomofpress/securedrop/blame/eaa2d45d371b64d5bb82317209d686c63063fec8/install_files/ansible-base/roles/app/templates/sites-available/journalist.conf#L42), causing the [`request.path`](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/journalist_app/__init__.py#L136) to be `/notfound` (instead of something that begins with `thisismeonion.onion/api/blahhh`), meaning that code execution will continue until [the redirect here](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/journalist_app/__init__.py#L140).

## Comments

The cause is the 403, and this is happening because of the [LimitExcept](https://httpd.apache.org/docs/current/mod/core.html#limitexcept) directive we are using in the Apache configs. These allow only GET, POST, and HEAD requests. To prove this to oneself, you can edit `/etc/apache2/sites-enabled/journalist.conf` and add DELETE as an allowed request method. This is similar to bugs #3772 and #3877.

In light of this, we should:

1. Revisit #1775, and
2. Incorporate some API endpoint testing, which would have detected all three bugs, into at least the pre-release QA process (these could eventually be in CI as part of the external test client described in #3661).
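One way to check for this class of regression directly against a running instance is an end-to-end probe; the fixture names and token handling below are illustrative sketches, not part of the repo:

```python
import requests

def test_delete_reaches_flask(journalist_url, api_token):
    # Requires a valid token: when Apache's <LimitExcept> blocks the
    # verb, the request dies with a 403 before Flask ever sees it; any
    # other status (e.g. 404 for a bogus UUID) proves DELETE got
    # through to the application.
    resp = requests.delete(
        journalist_url + '/api/v1/sources/not-a-real-uuid/remove_star',
        headers={'Authorization': 'Token {}'.format(api_token)},
    )
    assert resp.status_code != 403
```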
We discussed this bug today in the engineering meeting and decided that the way to resolve it is to modify the journalist Apache config, and ask admins who want to use the journalist API to run `securedrop-admin install`. Otherwise we'll need to do another brittle Apache config migration in the `postinst` of `securedrop-app-code`, and given that the changes here are more extensive than other migrations we've made, we should avoid it if possible. This does mean that we'll need to be extremely careful when making any other Apache config changes in the future (until we have a better story regarding minimizing config drift across instances - insert obligatory ansible pull reference #3136, which is looking like a pretty good way forward in the medium term imho).

Also, when we resolve this via updating the journalist Apache config template, we should also resolve #3877 at that time.
2019-01-08T20:29:26Z
[]
[]
freedomofpress/securedrop
4,052
freedomofpress__securedrop-4052
[ "4047" ]
b725c81ab6755b02c90c698d35f4427bf1b30788
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -267,7 +267,8 @@ def all_source_replies(source_uuid): raise e return jsonify({'message': 'Your reply has been stored', - 'uuid': reply.uuid}), 201 + 'uuid': reply.uuid, + 'filename': reply.filename}), 201 @api.route('/sources/<source_uuid>/replies/<reply_uuid>', methods=['GET', 'DELETE'])
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -652,6 +652,9 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, reply = Reply.query.filter_by(uuid=str(reply_uuid)).one_or_none() assert reply is not None + # check that the filename is present and correct (#4047) + assert response.json['filename'] == reply.filename + with journalist_app.app_context(): # Now verify everything was saved. assert reply.journalist_id == test_journo['id'] assert reply.source_id == source_id
include `filename` in response when posting a reply

## Description

This request is motivated by implementing the replies feature in the SD client. When we make a reply, we get an object back from the API like

```json
{
    "message": "Message created or whatever",
    "uuid": "some-uuid-abc123"
}
```

This is sufficient for us to track in-flight messages, but it is not enough information for us to be able to correctly order replies. We need the `filename` that the server generates as well to be able to display the messages in the correct order. This is especially true if the Tor connections that are made cause messages to reach the server out of order.

We might as well return the entire object and drop the `message` field; humans will not be seeing this, and such messages are best conveyed by the HTTP status code.

Without returning this field, if we want to have correct ordering of messages, for every reply we send, we must make an additional call to fetch the reply object from the API to retrieve the `filename` for ordering.

## User Stories

As a consumer of the API, I want to make as few calls as possible in order to display information in a UI.
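For context on why `filename` carries ordering information: reply filenames embed the source's interaction count as a leading integer, so a client can sort on that prefix. A minimal sketch, assuming filenames of the form `2-designation-reply.gpg` (a convention inferred from the codebase, not stated in this issue):

```python
def interaction_count(reply):
    # "3-dubious_tone-reply.gpg" -> 3
    return int(reply['filename'].split('-', 1)[0])

ordered_replies = sorted(replies, key=interaction_count)
```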
@heartsucker makes sense to me, please write down the full list of items you want to get (returned from the server) in the API call.

`POST /api/v1/sources/<source_uuid>/replies` should return the same object as `GET /api/v1/sources/<source_uuid>/replies/<reply_uuid>` for consistency (even though some of that information is redundant). This is actually the only major change. The rest of the `POST`s and `DELETE`s shouldn't return

```json
{
    "message": "The thing happened"
}
```

but should just be a `204 No Content` with an empty body, IMO. This is a breaking change, but since we're the only consumers of this API, I think that's acceptable.

It makes sense to add the filename to the reply API response so we know the interaction count on the client side without doing a sync.

> We might as well return the entire object and drop the `message` field; humans will not be seeing this, and such messages are best conveyed by the HTTP status code.

Why remove an informative message (and thus make a breaking change, which the other suggestion in this ticket isn't)? Developers are humans.

In all the cases where we have a confirmation message, it is always the same for a given HTTP code. I like to use the GitHub API as an example of a fairly good API, and in the case of object creation, they return the full object. In the case of all `POST`s or `DELETE`s, they don't return a status message outside of the HTTP code. But w/e, that's not really that important. I have a PR ready for the filename change, which is all I really need to keep working.
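If the `204 No Content` suggestion above were adopted, the handler would simply return an empty body. A sketch only (this is not what the API currently does), reusing the blueprint/route shape seen in the patches:

```python
from flask import Blueprint

api = Blueprint('api', __name__)

@api.route('/sources/<source_uuid>/replies/<reply_uuid>',
           methods=['DELETE'])
def delete_reply(source_uuid, reply_uuid):
    # Deletion logic elided; the point is the empty 204 response in
    # place of a {"message": ...} body.
    return '', 204
```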
2019-01-21T15:20:56Z
[]
[]
freedomofpress/securedrop
4,054
freedomofpress__securedrop-4054
[ "4053" ]
b725c81ab6755b02c90c698d35f4427bf1b30788
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -37,7 +37,10 @@ def decorated_function(*args, **kwargs): return abort(403, 'API token not found in Authorization header.') if auth_header: - auth_token = auth_header.split(" ")[1] + split = auth_header.split(" ") + if len(split) != 2 or split[0] != 'Token': + abort(403, 'Malformed authorization header.') + auth_token = split[1] else: auth_token = '' if not Journalist.validate_api_token_and_get_user(auth_token):
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -860,3 +860,24 @@ def test_api_does_not_set_cookie_headers(journalist_app, test_journo): assert 'Set-Cookie' not in observed_headers.keys() if 'Vary' in observed_headers.keys(): assert 'Cookie' not in observed_headers['Vary'] + + +# regression test for #4053 +def test_malformed_auth_token(journalist_app, journalist_api_token): + with journalist_app.app_context(): + # we know this endpoint requires an auth header + url = url_for('api.get_all_sources') + + with journalist_app.test_client() as app: + # precondition to ensure token is even valid + resp = app.get(url, headers={'Authorization': 'Token {}'.format(journalist_api_token)}) + assert resp.status_code == 200 + + resp = app.get(url, headers={'Authorization': 'not-token {}'.format(journalist_api_token)}) + assert resp.status_code == 403 + + resp = app.get(url, headers={'Authorization': journalist_api_token}) + assert resp.status_code == 403 + + resp = app.get(url, headers={'Authorization': 'too many {}'.format(journalist_api_token)}) + assert resp.status_code == 403
Malformed tokens throw exceptions ## Description A header of the form `Authorization: some-token-value-here` will cause an exception. ## Steps to Reproduce ```bash http --json GET localhost:8081/api/v1/sources 'Authorization: eyJhbGciOiJIUzI1NiIsImV4cCI6MTU0ODExNzEzNSwiaWF0IjoxNTQ4MDg4MzM1fQ.eyJpZCI6MX0.VjCdcVOPjsHWGl79wZAz4vjHVRANmCs_g-G6uyiFSl8' # -----------------------------------------------------------^ # note the missing token "token" ``` ## Expected Behavior HTTP 403 ## Actual Behavior ```pytb Traceback (most recent call last): File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2309, in __call__ return self.wsgi_app(environ, start_response) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2295, in wsgi_app response = self.handle_exception(e) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1741, in handle_exception reraise(exc_type, exc_value, tb) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2292, in wsgi_app response = self.full_dispatch_request() File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1815, in full_dispatch_request rv = self.handle_user_exception(e) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1718, in handle_user_exception reraise(exc_type, exc_value, tb) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1813, in full_dispatch_request rv = self.dispatch_request() File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1799, in dispatch_request return self.view_functions[rule.endpoint](**req.view_args) File "/home/heartsucker/code/freedomofpress/securedrop/securedrop/journalist_app/api.py", line 40, in decorated_function auth_token = auth_header.split(" ")[1] IndexError: list index out of range ``` ## Comments Detupling / indexing should be wrapped in a `try`/`except`.
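The merged fix (see the patch above) checks the length of the split header; the `try`/`except` form suggested in the comments would look roughly like this sketch:

```python
from flask import abort

def parse_auth_header(auth_header):
    try:
        scheme, auth_token = auth_header.split(" ")
        if scheme != 'Token':
            raise ValueError('unexpected scheme')
    except ValueError:
        # Raised both by a failed two-value unpack (zero, one, or
        # three-plus space-separated parts) and by a non-"Token" scheme.
        abort(403, 'Malformed authorization header.')
    return auth_token
```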
2019-01-21T18:42:08Z
[]
[]
freedomofpress/securedrop
4,055
freedomofpress__securedrop-4055
[ "4027" ]
a71d42aeea71049f46d6ff39ad262973714006c3
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -12,6 +12,7 @@ import i18n import template_filters import version +import platform from crypto_util import CryptoUtil from db import db @@ -61,6 +62,12 @@ def create_app(config): app.config['SQLALCHEMY_DATABASE_URI'] = db_uri db.init_app(app) + # Magic values for Xenial upgrade message + app.config.update( + XENIAL_WARNING_DATE=datetime.strptime('Mar 4 2019', '%b %d %Y'), + XENIAL_VER='16.04' + ) + app.storage = Storage(config.STORE_DIR, config.TEMP_DIR, config.JOURNALIST_KEY) @@ -135,6 +142,10 @@ def setup_g(): g.html_lang = i18n.locale_to_rfc_5646(g.locale) g.locales = i18n.get_locale2name() + if (platform.linux_distribution()[1] != app.config['XENIAL_VER'] and + datetime.now() >= app.config['XENIAL_WARNING_DATE']): + g.show_xenial_warning = True + if request.path.split('/')[1] == 'api': pass # We use the @token_required decorator for the API endpoints else: # We are not using the API
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -5,6 +5,7 @@ import random import zipfile import base64 +import datetime from base64 import b64decode from cStringIO import StringIO @@ -1585,6 +1586,55 @@ def test_render_locales(config, journalist_app, test_journo, test_source): assert url_end + '?l=en_US' in text, text +def test_render_xenial_positive(config, journalist_app, test_journo): + yesterday = datetime.datetime.now() - datetime.timedelta(days=1) + journalist_app.config.update( + XENIAL_WARNING_DATE=yesterday, + XENIAL_VER='16.04' + ) + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + resp = app.get(url_for('main.index')) + + text = resp.data.decode('utf-8') + assert "critical-skull" in text, text + + +def test_render_xenial_negative_version(config, journalist_app, test_journo): + yesterday = datetime.datetime.now() - datetime.timedelta(days=1) + journalist_app.config.update( + XENIAL_WARNING_DATE=yesterday, + XENIAL_VER='14.04' + ) + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + resp = app.get('/') + + text = resp.data.decode('utf-8') + assert "critical-skull" not in text, text + + +def test_render_xenial_negative_date(config, journalist_app, test_journo): + tomorrow = datetime.datetime.now() + datetime.timedelta(days=1) + journalist_app.config.update( + XENIAL_WARNING_DATE=tomorrow, + XENIAL_VER='16.04' + ) + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + resp = app.get('/') + + text = resp.data.decode('utf-8') + assert "critical-skull" not in text, text + + def test_download_selected_submissions_from_source(journalist_app, test_journo, test_source):
[xenial] Display advisory in Journalist Interface for instances running 14.04 (Trusty) Part of #3204, and a narrower version of #4001. As discussed in sprint planning today, at minimum, as part of the 0.12.0 release of SecureDrop, we'll want to display a warning (on the Journalist Interfaces) on instances running Trusty (14.04) that it's time to upgrade. This should link to the advisory on SecureDrop.org, which will include the most up-to-date information about the process. This can potentially be accomplished by ensuring that this feature is enabled only for the Trusty packages. To allow for sufficient time for post-release on-site "canary" testing, I would recommend displaying the warning only after March 4, 2019. # User Testing As a SecureDrop journalist or administrator, I want to be aware of major administrative issues concerning the security of my instance, so that I can ensure appropriate steps are taken to perform necessary updates.
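The approach that landed (see the patch above) gates the banner on both the detected OS release and a hard-coded grace date; the core check reduces to:

```python
import platform
from datetime import datetime

XENIAL_WARNING_DATE = datetime.strptime('Mar 4 2019', '%b %d %Y')
XENIAL_VER = '16.04'

def show_xenial_warning():
    # Warn only on hosts not yet running 16.04, and only once the
    # post-release "canary" testing window has passed.
    return (platform.linux_distribution()[1] != XENIAL_VER
            and datetime.now() >= XENIAL_WARNING_DATE)
```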
Noting that I'll be posting mox to this Issue, sometime tomorrow.

@ninavizz for a first draft of the upgrade message I'm thinking something like: "Your instance is running on an unsupported OS and must be upgraded to continue to receive security updates. For more information, see <link here>" (If you're thinking of different warning levels, this one would be critical.)

## Recommended banner/message/icon

**Skully icon, Admin:** https://drive.google.com/open?id=1pIp60_EBLRaosW8YRDtAmt_nixLh46B3
**Skully icon, Journo:** https://drive.google.com/open?id=1lLeHEduq5oh9a6kq2i7XBWcPIK6iN9DM
**Circle-bang icon, Journo:** https://drive.google.com/open?id=1VnlGMYrPwS86iwB1BmJd3nHwnBDR5V65

**Rationale:**

* The user has not made a user error, so first and foremost it's important for them to understand that. Secondly, their instance has not, in fact, been compromised, but is now more vulnerable to compromise than it ordinarily would be.
* Is there a way to currently display to users that an instance has been compromised? This would be good to know.
* I'm inclined towards the skully icon, because this is a software end-of-life issue. Home automation and the ubiquity of tech in our personal lives have made end-of-life concepts more common to non-technical users... hence my comfort with that mental model.
* My only hesitance is that I don't want to confuse users with the idea their current instance may already be compromised.
* Should the latter be too high a concern for others, my alternate suggestion is the circle-bang. I'm not keen on how close the dot is to the line on the exclamation point, as it interferes with small-size legibility.
* Why not the existing triangle-bang? Because this isn't a user error, and it's important to keep that symbology separate.
* Finally: I'm usually averse to using red in user messaging. The problem with red is that it piques user anxiety, which can interfere with cognitive processes in resolving errors. Because the user has not made a task-related, immediately correctable error, however, that is not a problem in this use case; if anything, that anxiety seems appropriate in this rare situation.

## Recommended message text

Critical Security: The operating system on your SecureDrop server has reached its end-of-life. A manual update is urgently required to remain safe. Learn More

**Sought points to communicate to the user** (a non-technical journalist, a nerdy journalist, or an admin) **seeing this, in order:**

1. You have not made an immediately correctable task-related mistake.
2. There is an urgent admin issue unrelated to your tasks in this session.
3. You need to remember to act upon this once you've completed your current tasks.
4. The issue pertains to SD hardware outside the Workstation laptops & Tails sticks.
5. The issue pertains to the hardware most important to not be compromised.
6. Your current instance has not been compromised.
7. Your current instance is now vulnerable to compromise.
8. If you don't know who to communicate with about this, please contact SD support.

Note: _At the **VERY TOP** of the article the above text links out to_, non-technical journalists should be spoken to in a single sentence/paragraph that encourages them to contact support if they don't know whom else to reach out to for help, should help be needed. We learned from one customer in our recent user research that their admin had left the org, and that the org had since been pokin' along without a SD admin for several months.
They'd "been meaning to" reach out to someone at FPF, but with the length of a journalist's to-do list, that was understandably low on the list. Their whole IT situation is in a state of transition, so the journos using SD also just didn't really know what to ask of FPF when contacting them. It's important to give folks in this situation a direct connection with a human to help them resolve this, as to not leave them dangling with such a vulnerability. My partner in the testing engaged with this user to prod them a little more about why they hadn't already yet reached-out to FPF, and the uncertainty w/ accompanying "oh, y'know" list of reasons was long. That has to be spoken to, simply, boldly, and discoverably. **Suggested icons:** Skully (preferred): https://fontawesome.com/icons/skull-crossbones?style=solid Circle-bang (alternate): https://fontawesome.com/icons/exclamation-circle?style=solid :+1: for skulls - one nit about the wording is that more than one server is affected. A SecureDrop instance has two, the application and monitor servers and both need updating. How about `The operating system used by your SecureDrop instance has reached its end-of-life. A manual update is urgently required to remain safe. Learn More`? @eloquence if the March 4 thing is a requirement, it would probably require either some kind of cronjob to be set up via scripts in 0.12.0 (fun to test), a point release on March 4 to update the app, or logic in the app with hard-coded dates. None of these options sound great to me. Is there a downside to having it displayed in 0.12.0 from release? Mh, the reason I think it might make sense to have it show up with a couple of weeks delay after the release is a) it'll give us some time to incorporate any findings with canary testers & early adopters into the advisory, b) it'll help spread out the support load a bit through Feb/Mar. The advisory URL is specific to Xenial so I'm not sure how problematic it is to also have a hardcoded date? But let's kick it around a bit on Monday, happy to drop that idea if y'all think it's not worth the extra complexity.
2019-01-21T20:16:21Z
[]
[]
freedomofpress/securedrop
4,088
freedomofpress__securedrop-4088
[ "4081" ]
21de59984ffc672ac7f9fee734e852f254f4e848
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -105,9 +105,12 @@ def get_token(): journalist = Journalist.login(username, passphrase, one_time_code) token_expiry = datetime.utcnow() + timedelta( seconds=TOKEN_EXPIRATION_MINS * 60) - response = jsonify({'token': journalist.generate_api_token( - expiration=TOKEN_EXPIRATION_MINS * 60), - 'expiration': token_expiry.isoformat() + 'Z'}) + + response = jsonify({ + 'token': journalist.generate_api_token(expiration=TOKEN_EXPIRATION_MINS * 60), + 'expiration': token_expiry.isoformat() + 'Z', + 'journalist_uuid': journalist.uuid, + }) # Update access metadata journalist.last_access = datetime.utcnow()
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -155,7 +155,8 @@ def test_journo(journalist_app): 'username': username, 'password': password, 'otp_secret': otp_secret, - 'id': user.id} + 'id': user.id, + 'uuid': user.uuid} @pytest.fixture(scope='function') diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -41,6 +41,7 @@ def test_valid_user_can_get_an_api_token(journalist_app, test_journo): headers=get_api_headers()) observed_response = json.loads(response.data) + assert observed_response['journalist_uuid'] == test_journo['uuid'] assert isinstance(Journalist.validate_api_token_and_get_user( observed_response['token']), Journalist) is True assert response.status_code == 200
Return current journalist ID when creating a token ## Description The endpoint that generates auth tokens needs to return the current user's UUID so the client knows which user it is without having to hit the user data endpoint.
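Based on the patch above, a successful response from the token endpoint (`get_token`) now carries the UUID alongside the token; all values below are illustrative:

```json
{
    "token": "eyJhbGciOiJIUzI1NiIs...",
    "expiration": "2019-01-29T21:58:23.123456Z",
    "journalist_uuid": "891cd046-020c-4b7f-a0c3-7a2d2c4f5b3a"
}
```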
2019-01-29T20:58:23Z
[]
[]
freedomofpress/securedrop
4,094
freedomofpress__securedrop-4094
[ "4013" ]
090fe88b524c56ac8da7c2061c7826c77f3c1d90
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -1,7 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import gnupg +from distutils.version import StrictVersion +import pretty_bad_protocol as gnupg import os import io import scrypt @@ -11,7 +12,7 @@ from base64 import b32encode from datetime import date from flask import current_app -from gnupg._util import _is_stream, _make_binary_stream +from pretty_bad_protocol._util import _is_stream, _make_binary_stream import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -79,7 +80,16 @@ def __init__(self, self.do_runtime_tests() - self.gpg = gnupg.GPG(binary='gpg2', homedir=gpg_key_dir) + # --pinentry-mode, required for SecureDrop on gpg 2.1.x+, was + # added in gpg 2.1. + self.gpg_key_dir = gpg_key_dir + gpg_binary = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) + if StrictVersion(gpg_binary.binary_version) >= StrictVersion('2.1'): + self.gpg = gnupg.GPG(binary='gpg2', + homedir=gpg_key_dir, + options=['--pinentry-mode loopback']) + else: + self.gpg = gpg_binary # map code for a given language to a localized wordlist self.__language2words = {} # type: Dict[Text, List[str]] @@ -176,7 +186,7 @@ def genkeypair(self, name, secret): """ name = clean(name) secret = self.hash_codename(secret, salt=self.scrypt_gpg_pepper) - return self.gpg.gen_key(self.gpg.gen_key_input( + genkey_obj = self.gpg.gen_key(self.gpg.gen_key_input( key_type=self.GPG_KEY_TYPE, key_length=self.__gpg_key_length, passphrase=secret, @@ -184,6 +194,7 @@ def genkeypair(self, name, secret): creation_date=self.DEFAULT_KEY_CREATION_DATE.isoformat(), expire_date=self.DEFAULT_KEY_EXPIRATION_DATE )) + return genkey_obj def delete_reply_keypair(self, source_filesystem_id): key = self.getkey(source_filesystem_id) @@ -191,10 +202,12 @@ def delete_reply_keypair(self, source_filesystem_id): # keypair if not key: return - # The private key needs to be deleted before the public key can be - # deleted. http://pythonhosted.org/python-gnupg/#deleting-keys - self.gpg.delete_keys(key, True) # private key - self.gpg.delete_keys(key) # public key + + # Always delete keys without invoking pinentry-mode = loopback + # see: https://lists.gnupg.org/pipermail/gnupg-users/2016-May/055965.html + temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) + # The subkeys keyword argument deletes both secret and public keys. 
+ temp_gpg.delete_keys(key, secret=True, subkeys=True) def getkey(self, name): for key in self.gpg.list_keys(): diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py --- a/securedrop/secure_tempfile.py +++ b/securedrop/secure_tempfile.py @@ -4,7 +4,7 @@ import io from tempfile import _TemporaryFileWrapper -from gnupg._util import _STREAMLIKE_TYPES +from pretty_bad_protocol._util import _STREAMLIKE_TYPES from cryptography.exceptions import AlreadyFinalized from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.algorithms import AES diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -86,9 +86,8 @@ def lookup(): try: with io.open(reply_path, "rb") as f: contents = f.read() - reply.decrypted = current_app.crypto_util.decrypt( - g.codename, - contents).decode('utf-8') + reply_obj = current_app.crypto_util.decrypt(g.codename, contents) + reply.decrypted = reply_obj.decode('utf-8') except UnicodeDecodeError: current_app.logger.error("Could not decode reply %s" % reply.filename)
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import gnupg +import pretty_bad_protocol as gnupg import logging import os import io @@ -89,6 +89,11 @@ def config(tmpdir): tmp = data.mkdir('tmp') sqlite = data.join('db.sqlite') + # gpg 2.1+ requires gpg-agent, see #4013 + gpg_agent_config = str(keys.join('gpg-agent.conf')) + with open(gpg_agent_config, 'w+') as f: + f.write('allow-loopback-pinentry') + gpg = gnupg.GPG('gpg2', homedir=str(keys)) for ext in ['sec', 'pub']: with io.open(path.join(path.dirname(__file__), diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -1,12 +1,10 @@ # -*- coding: utf-8 -*- -import gnupg +from distutils.version import StrictVersion import gzip import os import random import re -import shutil -import tempfile import zipfile from base64 import b32encode @@ -326,11 +324,10 @@ def assertion(): zf = zipfile.ZipFile(StringIO(resp.data), 'r') data = zf.read(zf.namelist()[0]) - _can_decrypt_with_key(journalist_app, data, config.JOURNALIST_KEY) + _can_decrypt_with_key(journalist_app, data) _can_decrypt_with_key( journalist_app, data, - current_app.crypto_util.getkey(filesystem_id), codename) # Test deleting reply on the journalist interface @@ -400,39 +397,28 @@ def assertion(): utils.async.wait_for_assertion(assertion) -def _can_decrypt_with_key(journalist_app, msg, key_fpr, passphrase=None): +def _can_decrypt_with_key(journalist_app, msg, passphrase=None): """ - Test that the given GPG message can be decrypted with the given key - (identified by its fingerprint). + Test that the given GPG message can be decrypted. """ - # GPG does not provide a way to specify which key to use to decrypt a - # message. Since the default keyring that we use has both the - # `config.JOURNALIST_KEY` and all of the reply keypairs, there's no way - # to use it to test whether a message is decryptable with a specific - # key. 
- gpg_tmp_dir = tempfile.mkdtemp() - gpg = gnupg.GPG('gpg2', homedir=gpg_tmp_dir) - - # Export the key of interest from the application's keyring - pubkey = journalist_app.crypto_util.gpg.export_keys(key_fpr) - seckey = journalist_app.crypto_util.gpg.export_keys(key_fpr, secret=True) - # Import it into our isolated temporary GPG directory - for key in (pubkey, seckey): - gpg.import_keys(key) - - # Attempt decryption with the given key + + # For gpg 2.1+, a non null passphrase _must_ be passed to decrypt() + using_gpg_2_1 = StrictVersion( + journalist_app.crypto_util.gpg.binary_version) >= StrictVersion('2.1') + if passphrase: passphrase = journalist_app.crypto_util.hash_codename( passphrase, salt=journalist_app.crypto_util.scrypt_gpg_pepper) - decrypted_data = gpg.decrypt(msg, passphrase=passphrase) + elif using_gpg_2_1: + passphrase = 'dummy passphrase' + + decrypted_data = journalist_app.crypto_util.gpg.decrypt( + msg, passphrase=passphrase) assert decrypted_data.ok, \ "Could not decrypt msg with key, gpg says: {}" \ .format(decrypted_data.stderr) - # We have to clean up the temporary GPG dir - shutil.rmtree(gpg_tmp_dir) - def test_reply_normal(journalist_app, source_app, diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1586,13 +1586,16 @@ def test_render_locales(config, journalist_app, test_journo, test_source): assert url_end + '?l=en_US' in text, text -def test_render_xenial_positive(config, journalist_app, test_journo): +def test_render_xenial_positive(config, journalist_app, test_journo, mocker): yesterday = datetime.datetime.now() - datetime.timedelta(days=1) journalist_app.config.update( XENIAL_WARNING_DATE=yesterday, XENIAL_VER='16.04' ) + mocked_error_platform = mocker.patch('platform.linux_distribution') + mocked_error_platform.return_value = ('Ubuntu', '14.04', 'trusty') + with journalist_app.test_client() as app: _login_user(app, test_journo['username'], test_journo['password'], test_journo['otp_secret']) @@ -1603,13 +1606,16 @@ def test_render_xenial_positive(config, journalist_app, test_journo): assert "critical-skull" in text, text -def test_render_xenial_negative_version(config, journalist_app, test_journo): +def test_render_xenial_negative_version(config, journalist_app, test_journo, mocker): yesterday = datetime.datetime.now() - datetime.timedelta(days=1) journalist_app.config.update( XENIAL_WARNING_DATE=yesterday, - XENIAL_VER='14.04' + XENIAL_VER='16.04' ) + mocked_error_platform = mocker.patch('platform.linux_distribution') + mocked_error_platform.return_value = ('Ubuntu', '16.04', 'xenial') + with journalist_app.test_client() as app: _login_user(app, test_journo['username'], test_journo['password'], test_journo['otp_secret']) diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py --- a/securedrop/tests/test_secure_tempfile.py +++ b/securedrop/tests/test_secure_tempfile.py @@ -3,7 +3,7 @@ import os import pytest -from gnupg._util import _is_stream +from pretty_bad_protocol._util import _is_stream os.environ['SECUREDROP_ENV'] = 'test' # noqa from secure_tempfile import SecureTemporaryFile diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py --- a/securedrop/tests/utils/env.py +++ b/securedrop/tests/utils/env.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """Testing utilities related to setup and teardown of test environment. 
""" -import gnupg +from distutils.version import StrictVersion +import pretty_bad_protocol as gnupg import os import io import shutil @@ -37,7 +38,20 @@ def init_gpg(): """Initialize the GPG keyring and import the journalist key for testing. """ - gpg = gnupg.GPG('gpg2', homedir=config.GPG_KEY_DIR) + + # gpg 2.1+ requires gpg-agent, see #4013 + gpg_agent_config = os.path.join(config.GPG_KEY_DIR, 'gpg-agent.conf') + with open(gpg_agent_config, 'w+') as f: + f.write('allow-loopback-pinentry') + + gpg_binary = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR) + if StrictVersion(gpg_binary.binary_version) >= StrictVersion('2.1'): + gpg = gnupg.GPG(binary='gpg2', + homedir=config.GPG_KEY_DIR, + options=['--pinentry-mode loopback']) + else: + gpg = gpg_binary + # Faster to import a pre-generated key than to gen a new one every time. for keyfile in (join(FILES_DIR, "test_journalist_key.pub"), join(FILES_DIR, "test_journalist_key.sec")):
[xenial] Journalist replies are not displayed in source interface

## Steps to Reproduce

* Spin up a Xenial staging environment (`make build-debs-xenial` and `molecule converge -s libvirt-staging-xenial`)
* Submit a document or message from the source interface
* Reply to said document or message
* Observe empty reply on the source interface

## Expected Behavior

Source should be able to view journalist reply

## Actual Behavior

Journalist reply is blank in source interface

## Comments

Several changes were made in gpg 2.1, which is used by Xenial [0], most notably the removal of the secret keyring.

[0]: https://www.gnupg.org/faq/whats-new-in-2.1.html
I took a look at this today, here's where I'm at so far: I can reproduce this bug in a Xenial-based SecureDrop dev container. The secret keys for decrypting the replies are available, e.g.: ``` >>> self.gpg.list_keys(secret=True) gnupg._parsers.ListKeys([{'dummy': u'', 'keyid': u'53ABF186E68948B9', 'expires': u'', 'rev': {}, 'sigs': {u'Autogenerated Key <JO2JDJSLGIPNTKEDZUECNFMUG2QYXVAS6VAMNXIMIAAIORSFJSG : []}, 'subkeys': [], 'length': u'4096', 'ownertrust': u'u', 'algo': u'1', 'fingerprint': u'DE410BBA310AA8745E3BE7E653ABF186E68948B9', 'date': u'1368489600', 'trust': u'u', 'type': u'sec', 'uids': [u'Autogenerated Key <JO2JDJSLGIPNTKEDZUECNFMUG2QYXVAS6VAMNXIMIAAIORSFJSG ] }, {'dummy': u'', 'keyid': u'5761A79A11779651', 'expires': u'', 'rev': {}, 'sigs': {u'Autogenerated Key <3OQPA6YBNFBM3BEQH7KHYWPNLDBHKHP76HP2SCW6324K45QDHVD : []}, 'subkeys': [], 'length': u'4096', 'ownertrust': u'u', 'algo': u'1', 'fingerprint': u'D90BD4822154FF63A54B95A55761A79A11779651', 'date': u'1368489600', 'trust': u'u', 'type': u'sec', 'uids': [u'Autogenerated Key <3OQPA6YBNFBM3BEQH7KHYWPNLDBHKHP76HP2SCW6324K45QDHVD ] }, {'dummy': u'', 'keyid': u'B4863A093C809921', 'expires': u'', 'rev': {}, 'sigs': {u'Autogenerated Key <ZOO3FPHBLKQQREIQ7ECPMGTPVAFXUSLEDUY2ZENQSWRWABALMPS : []}, 'subkeys': [], 'length': u'4096', 'ownertrust': u'u', 'algo': u'1', 'fingerprint': u'2E945059A3A56EAE4F15DD7DB4863A093C809921', 'date': u'1368489600', 'trust': u'u', 'type': u'sec', 'uids': [u'Autogenerated Key <ZOO3FPHBLKQQREIQ7ECPMGTPVAFXUSLEDUY2ZENQSWRWABALMPS ] }]) ``` But indeed, when I try decrypting a test file, the decrypted content is empty (one sees `self.gpg` below because I put a breakpoint in `crypto_util.py`): ``` >>> input_data = self.gpg.gen_key_input(name_email='[email protected]', passphrase='test') >>> key = self.gpg.gen_key(input_data) >>> key.fingerprint u'E27203CAB6B15F1B4A19260276A8A236B3C334CA' >>> encrypted = self.gpg.encrypt('test data', key.fingerprint) >>> encrypted.data '-----BEGIN PGP MESSAGE-----\n\nhQEMA8k3DSXIftBUAQf/YZg9yiIRoWv8NEtwLFR2j >>> encrypted.ok True >>> decrypted = self.gpg.decrypt(str(encrypted), passphrase='test') >>> decrypted.data '' >>> decrypted.ok False >>> decrypted.stderr u'[GNUPG:] ENC_TO C9370D25C87ED054 1 0\n[GNUPG:] PINENTRY_LAUNCHED 984\ngpg: encrypted with 2048-bit RSA key, ID C87ED054, created 2019-01-11\n "Autogenerated Key <[email protected]>"\ngpg: public key decryption failed: Inappropriate ioctl for device\n[GNUPG:] ERROR pkdecrypt_failed 83918950\n[GNUPG:] BEGIN_DECRYPTION\n[GNUPG:] DECRYPTION_FAILED\ngpg: decryption failed: No secret key\n[GNUPG:] END_DECRYPTION\n' >>> ``` This looks to be the `PINENTRY_LAUNCHED` issue described in https://github.com/isislovecruft/python-gnupg/issues/98, https://github.com/isislovecruft/python-gnupg/issues/122, and https://github.com/isislovecruft/python-gnupg/issues/137. The version of python-gnupg we are using includes the patches that correspond to those issues (released in 2.2.0, we're on 2.3.1). In those issues modifying `gpg-agent.conf` to allow the loopback option resolved the issue. I haven't tried yet, but will try this next. If anyone beats me to it, please comment on this issue. Running the full application test suite on Xenial indicates that there are a [number of other test failures](https://circleci.com/gh/freedomofpress/securedrop/20624) which need to be investigated and resolved (will scope as part of this issue and file followups where appropriate). Adding the loopback pinentry configuration did not help in my system. 
It still failed to decrypt.
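The fix that eventually landed (see the patch above) pairs an `allow-loopback-pinentry` line in `gpg-agent.conf` with a loopback pinentry option when constructing the GPG wrapper; condensed from the patch (`gpg_key_dir` stands in for the keyring path):

```python
from distutils.version import StrictVersion
import pretty_bad_protocol as gnupg

# gpg-agent.conf in the keyring homedir must contain
# "allow-loopback-pinentry" for the option below to take effect.
gpg = gnupg.GPG(binary='gpg2', homedir=gpg_key_dir)
if StrictVersion(gpg.binary_version) >= StrictVersion('2.1'):
    # Without loopback pinentry, gpg 2.1+ tries to launch pinentry and
    # headless decryption fails ("Inappropriate ioctl for device").
    gpg = gnupg.GPG(binary='gpg2', homedir=gpg_key_dir,
                    options=['--pinentry-mode loopback'])
```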
2019-02-01T03:19:39Z
[]
[]
freedomofpress/securedrop
4,097
freedomofpress__securedrop-4097
[ "4059" ]
8339e327626c225dd3f04b6eed2040e86afb149f
diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py --- a/securedrop/source_app/api.py +++ b/securedrop/source_app/api.py @@ -1,4 +1,5 @@ import json +import platform from flask import Blueprint, make_response @@ -12,6 +13,7 @@ def make_blueprint(config): def metadata(): meta = {'gpg_fpr': config.JOURNALIST_KEY, 'sd_version': version.__version__, + 'server_os': platform.linux_distribution()[1], } resp = make_response(json.dumps(meta)) resp.headers['Content-Type'] = 'application/json'
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -16,6 +16,7 @@ from db import db from models import Source, Reply from source_app import main as source_app_main +from source_app import api as source_app_api from utils.db_helper import new_codename from utils.instrument import InstrumentedApp @@ -516,12 +517,16 @@ def test_why_journalist_key(source_app): def test_metadata_route(source_app): - with source_app.test_client() as app: - resp = app.get(url_for('api.metadata')) - assert resp.status_code == 200 - assert resp.headers.get('Content-Type') == 'application/json' - assert json.loads(resp.data.decode('utf-8')).get('sd_version') \ - == version.__version__ + with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: + mocked_platform.return_value = ("Ubuntu", "16.04", "xenial") + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert json.loads(resp.data.decode('utf-8')).get('sd_version') \ + == version.__version__ + assert json.loads(resp.data.decode('utf-8')).get('server_os') \ + == '16.04' def test_login_with_overly_long_codename(source_app):
[xenial] Add base OS info to metadata endpoint

## Description

Part of #3204. In order to have better visibility into which SecureDrop instances are actively updating from Ubuntu Trusty to Ubuntu Xenial (and to actively engage with admins as needed), it makes sense to expose this info via the existing metadata endpoint. To avoid breaking existing uses, this should be done in a new `server_os` variable.

From a security perspective, this does not give an attacker key information -- for now, both releases receive security updates; once 14.04 has reached EOL, it will also no longer receive SecureDrop package updates, which will be reflected in its already-exposed SecureDrop version.

# User Stories

As a SecureDrop administrator, I'd like to be actively alerted by the support team when I have to perform critical security updates, so that I can act on them with appropriate priority.

As a SecureDrop support team member, I'd like to know whether an instance is following recommended upgrade procedures, so I can increase the urgency of outreach efforts if required.
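With the patch above applied, the metadata endpoint's JSON response gains the new field; the fingerprint and version values here are illustrative:

```json
{
    "gpg_fpr": "65A1B5FF195B56353CC63DFF00000000DEADBEEF",
    "sd_version": "0.12.0",
    "server_os": "16.04"
}
```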
@nightwarrior-xxx By all means! We're in a crunch over the next couple of weeks to get all the must-do functionality in #3204 in; if you have time to help with this issue over the next few days, it would be much appreciated. :)

@nightwarrior-xxx you commented on this last night but have since deleted your comment. Are you going to work on this or should someone else?

@heartsucker Yes, I would love to work on it. I thought to ask you on Gitter first before taking up this issue.

Yeah, then it's all yours.

@nightwarrior-xxx see the changes added in #4055 for a pretty basic way to check OS versions. This would be a really useful addition from a support perspective. Thanks for grabbing it!

@zenmonkeykstop [Just to clear up confusion] Do I have to import platform and store linux_distribution in a variable called ```server_os``` in the file ```securedrop/admin/securedrop_admin/init.py``` or in ```securedrop/admin/tests/test_securedrop-admin-setup.py```?

You don't have to add a variable to that file. You can just add it under the route in `source_app/api.py`.
2019-02-01T21:06:33Z
[]
[]
freedomofpress/securedrop
4,099
freedomofpress__securedrop-4099
[ "3964" ]
530a23901a98f436413fe780110f9ea22ba33f51
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -40,7 +40,7 @@ def create_app(config): template_folder=config.JOURNALIST_TEMPLATES_DIR, static_folder=path.join(config.SECUREDROP_ROOT, 'static')) - app.config.from_object(config.JournalistInterfaceFlaskConfig) + app.config.from_object(config.JournalistInterfaceFlaskConfig) # type: ignore app.sdconfig = config app.session_interface = JournalistInterfaceSessionInterface() diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -37,7 +37,7 @@ def create_app(config): template_folder=config.SOURCE_TEMPLATES_DIR, static_folder=path.join(config.SECUREDROP_ROOT, 'static')) app.request_class = RequestThatSecuresFileUploads - app.config.from_object(config.SourceInterfaceFlaskConfig) + app.config.from_object(config.SourceInterfaceFlaskConfig) # type: ignore app.sdconfig = config # The default CSRF token expiration is 1 hour. Since large uploads can
diff --git a/molecule/builder-trusty/tests/test_build_dependencies.py b/molecule/builder-trusty/tests/test_build_dependencies.py --- a/molecule/builder-trusty/tests/test_build_dependencies.py +++ b/molecule/builder-trusty/tests/test_build_dependencies.py @@ -8,43 +8,43 @@ ] -def test_pip_wheel_installed(Command): +def test_pip_wheel_installed(host): """ Ensure `wheel` is installed via pip, for packaging Python dependencies into a Debian package. """ - c = Command("pip list installed") + c = host.run("pip list installed") assert "wheel" in c.stdout assert c.rc == 0 -def test_sass_gem_installed(Command): +def test_sass_gem_installed(host): """ Ensure the `sass` Ruby gem is installed, for compiling SASS to CSS. """ - c = Command("gem list") + c = host.run("gem list") assert "sass (3.4.23)" in c.stdout assert c.rc == 0 -def test_pip_dependencies_installed(Command): +def test_pip_dependencies_installed(host): """ Ensure the development pip dependencies are installed """ - c = Command("pip list installed") + c = host.run("pip list installed") assert "Flask-Babel" in c.stdout assert c.rc == 0 @pytest.mark.xfail(reason="This check conflicts with the concept of pegging" "dependencies") -def test_build_all_packages_updated(Command): +def test_build_all_packages_updated(host): """ Ensure a dist-upgrade has already been run, by checking that no packages are eligible for upgrade currently. This will ensure that all upgrades, security and otherwise, have been applied to the VM used to build packages. """ - c = Command('aptitude --simulate -y dist-upgrade') + c = host.run('aptitude --simulate -y dist-upgrade') assert c.rc == 0 assert "No packages will be installed, upgraded, or removed." in c.stdout diff --git a/molecule/builder-trusty/tests/test_legacy_paths.py b/molecule/builder-trusty/tests/test_legacy_paths.py --- a/molecule/builder-trusty/tests/test_legacy_paths.py +++ b/molecule/builder-trusty/tests/test_legacy_paths.py @@ -9,7 +9,7 @@ '/tmp/build-securedrop-ossec-agent', '/tmp/build-securedrop-ossec-server', ]) -def test_build_ossec_apt_dependencies(File, build_path): +def test_build_ossec_apt_dependencies(host, build_path): """ Ensure that unwanted build paths are absent. Most of these were created as unwanted side-effects during CI-related changes to the build scripts. @@ -17,4 +17,4 @@ def test_build_ossec_apt_dependencies(File, build_path): All paths are rightly considered "legacy" and should never be present on the build host. This test is strictly for guarding against regressions. """ - assert not File(build_path).exists + assert not host.file(build_path).exists diff --git a/molecule/builder-trusty/tests/test_securedrop_deb_package.py b/molecule/builder-trusty/tests/test_securedrop_deb_package.py --- a/molecule/builder-trusty/tests/test_securedrop_deb_package.py +++ b/molecule/builder-trusty/tests/test_securedrop_deb_package.py @@ -72,18 +72,18 @@ def get_deb_tags(): @pytest.mark.parametrize("deb", deb_packages) -def test_build_deb_packages(File, deb): +def test_build_deb_packages(host, deb): """ Sanity check the built Debian packages for Control field values and general package structure. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) assert deb_package.is_file @pytest.mark.parametrize("deb", deb_packages) -def test_deb_packages_appear_installable(File, Command, Sudo, deb): +def test_deb_packages_appear_installable(host, deb): """ Confirms that a dry-run of installation reports no errors. 
Simple check for valid Debian package structure, but not thorough. @@ -94,16 +94,16 @@ def test_deb_packages_appear_installable(File, Command, Sudo, deb): Testing application behavior is left to the functional tests. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) deb_basename = os.path.basename(deb_package.path) package_name = extract_package_name_from_filepath(deb_package.path) assert deb_basename.startswith(package_name) - # Sudo is required to call `dpkg --install`, even as dry-run. - with Sudo(): - c = Command("dpkg --install --dry-run {}".format(deb_package.path)) + # sudo is required to call `dpkg --install`, even as dry-run. + with host.sudo(): + c = host.run("dpkg --install --dry-run {}".format(deb_package.path)) assert "Selecting previously unselected package {}".format( package_name) in c.stdout regex = "Preparing to unpack [./]+{} ...".format( @@ -113,18 +113,18 @@ def test_deb_packages_appear_installable(File, Command, Sudo, deb): @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_control_fields(File, Command, deb): +def test_deb_package_control_fields(host, deb): """ Ensure Debian Control fields are populated as expected in the package. These checks are rather superficial, and don't actually confirm that the .deb files are not broken. At a later date, consider integration tests that actually use these built files during an Ansible provisioning run. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) package_name = extract_package_name_from_filepath(deb_package.path) # The `--field` option will display all fields if none are specified. - c = Command("dpkg-deb --field {}".format(deb_package.path)) + c = host.run("dpkg-deb --field {}".format(deb_package.path)) assert "Maintainer: SecureDrop Team <[email protected]>" in c.stdout # The securedrop-config package is architecture indepedent @@ -138,11 +138,11 @@ def test_deb_package_control_fields(File, Command, deb): @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_control_fields_homepage(File, Command, deb): - deb_package = File(deb.format( +def test_deb_package_control_fields_homepage(host, deb): + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) # The `--field` option will display all fields if none are specified. - c = Command("dpkg-deb --field {}".format(deb_package.path)) + c = host.run("dpkg-deb --field {}".format(deb_package.path)) # The OSSEC source packages will have a different homepage; # all other packages should set securedrop.org as homepage. if os.path.basename(deb_package.path).startswith('ossec-'): @@ -152,7 +152,7 @@ def test_deb_package_control_fields_homepage(File, Command, deb): @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_contains_no_config_file(File, Command, deb): +def test_deb_package_contains_no_config_file(host, deb): """ Ensures the `securedrop-app-code` package does not ship a `config.py` file. Doing so would clobber the site-specific changes made via Ansible. @@ -160,53 +160,53 @@ def test_deb_package_contains_no_config_file(File, Command, deb): Somewhat lazily checking all deb packages, rather than just the app-code package, but it accomplishes the same in a DRY manner. 
""" - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) assert not re.search("^.*/config\.py$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_contains_pot_file(File, Command, deb): +def test_deb_package_contains_pot_file(host, deb): """ Ensures the `securedrop-app-code` package has the messages.pot file """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # Only relevant for the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: assert re.search("^.*messages.pot$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_contains_mo_file(File, Command, deb): +def test_deb_package_contains_mo_file(host, deb): """ Ensures the `securedrop-app-code` package has at least one compiled mo file. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # Only relevant for the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: assert re.search("^.*messages\.mo$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_contains_no_generated_assets(File, Command, deb): +def test_deb_package_contains_no_generated_assets(host, deb): """ Ensures the `securedrop-app-code` package does not ship a minified static assets, which are built automatically via Flask-Assets, and may be present in the source directory used to build from. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) # Only relevant for the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: - c = Command("dpkg-deb --contents {}".format(deb_package.path)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # static/gen/ directory should exist assert re.search("^.*\./var/www/securedrop" "/static/gen/$", c.stdout, re.M) @@ -229,17 +229,17 @@ def test_deb_package_contains_no_generated_assets(File, Command, deb): @pytest.mark.parametrize("deb", deb_packages) -def test_deb_package_contains_css(File, Command, deb): +def test_deb_package_contains_css(host, deb): """ Ensures the `securedrop-app-code` package contains files that are generated during the `sass` build process. """ - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) # Only relevant for the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: - c = Command("dpkg-deb --contents {}".format(deb_package.path)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) for css_type in ['journalist', 'source']: assert re.search("^.*\./var/www/securedrop/static/" @@ -247,13 +247,13 @@ def test_deb_package_contains_css(File, Command, deb): @pytest.mark.parametrize("deb, tag", deb_tags) -def test_deb_package_lintian(File, Command, deb, tag): +def test_deb_package_lintian(host, deb, tag): """ Ensures lintian likes our Debian packages. 
""" - deb_package = File(deb.format( + deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) - c = Command("""lintian --tags {} --no-tag-display-limit {}""".format( + c = host.run("lintian --tags {} --no-tag-display-limit {}".format( tag, deb_package.path)) assert len(c.stdout) == 0 diff --git a/molecule/testinfra/staging/app-code/test_haveged.py b/molecule/testinfra/staging/app-code/test_haveged.py --- a/molecule/testinfra/staging/app-code/test_haveged.py +++ b/molecule/testinfra/staging/app-code/test_haveged.py @@ -1,11 +1,11 @@ testinfra_hosts = ["app-staging"] -def test_haveged_config(File): +def test_haveged_config(host): """ Ensure haveged's low entrop watermark is sufficiently high. """ - f = File('/etc/default/haveged') + f = host.file('/etc/default/haveged') assert f.is_file assert f.user == 'root' assert f.group == 'root' @@ -13,25 +13,25 @@ def test_haveged_config(File): assert f.contains('^DAEMON_ARGS="-w 2400"$') -def test_haveged_no_duplicate_lines(Command): +def test_haveged_no_duplicate_lines(host): """ Regression test to check for duplicate entries. Earlier playbooks for configuring the SD instances needlessly appended the `DAEMON_ARGS` line everytime the playbook was run. Fortunately the duplicate lines don't break the service, but it's still poor form. """ - c = Command("uniq --repeated /etc/default/haveged") + c = host.run("uniq --repeated /etc/default/haveged") assert c.rc == 0 assert c.stdout == "" -def test_haveged_is_running(Service, Sudo): +def test_haveged_is_running(host): """ Ensure haveged service is running, to provide additional entropy. """ - # Sudo is necessary to read /proc when running under grsecurity, + # sudo is necessary to read /proc when running under grsecurity, # which the App hosts do. Not technically necessary under development. - with Sudo(): - s = Service("haveged") + with host.sudo(): + s = host.service("haveged") assert s.is_running assert s.is_enabled diff --git a/molecule/testinfra/staging/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py --- a/molecule/testinfra/staging/app-code/test_redis_worker.py +++ b/molecule/testinfra/staging/app-code/test_redis_worker.py @@ -18,19 +18,19 @@ "user={}".format(securedrop_test_vars.securedrop_user), 'environment=HOME="/tmp/python-gnupg"', ]) -def test_redis_worker_configuration(File, config_line): +def test_redis_worker_configuration(host, config_line): """ Ensure SecureDrop Redis worker config for supervisor service management is configured correctly. """ - f = File('/etc/supervisor/conf.d/securedrop_worker.conf') + f = host.file('/etc/supervisor/conf.d/securedrop_worker.conf') # Config lines may have special characters such as [] which will # throw off the regex matching, so let's escape those chars. regex = re.escape(config_line) assert f.contains('^{}$'.format(regex)) -def test_redis_worker_config_file(File): +def test_redis_worker_config_file(host): """ Ensure SecureDrop Redis worker config for supervisor service management has proper ownership and mode. @@ -38,7 +38,7 @@ def test_redis_worker_config_file(File): Using separate test so that the parametrization doesn't rerun the file mode checks, which would be useless. 
""" - f = File('/etc/supervisor/conf.d/securedrop_worker.conf') + f = host.file('/etc/supervisor/conf.d/securedrop_worker.conf') assert f.is_file assert oct(f.mode) == '0644' assert f.user == "root" diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -5,13 +5,13 @@ securedrop_test_vars = pytest.securedrop_test_vars -def test_apache_default_docroot_is_absent(File): +def test_apache_default_docroot_is_absent(host): """ Ensure that the default docroot for Apache, containing static HTML under Debian, has been removed. Leaving it in place can be a privacy leak, as it displays version information by default. """ - assert not File('/var/www/html').exists + assert not host.file('/var/www/html').exists @pytest.mark.parametrize('package', [ @@ -25,38 +25,38 @@ def test_apache_default_docroot_is_absent(File): 'sqlite3', 'supervisor', ]) -def test_securedrop_application_apt_dependencies(Package, package): +def test_securedrop_application_apt_dependencies(host, package): """ Ensure apt dependencies required to install `securedrop-app-code` are present. These should be pulled in automatically via apt, due to specification in Depends in package control file. """ - assert Package(package).is_installed + assert host.package(package).is_installed -def test_securedrop_application_test_locale(File, Sudo): +def test_securedrop_application_test_locale(host): """ Ensure SecureDrop DEFAULT_LOCALE is present. """ - securedrop_config = File("{}/config.py".format( + securedrop_config = host.file("{}/config.py".format( securedrop_test_vars.securedrop_code)) - with Sudo(): + with host.sudo(): assert securedrop_config.is_file assert securedrop_config.contains("^DEFAULT_LOCALE") assert securedrop_config.content.count("DEFAULT_LOCALE") == 1 -def test_securedrop_application_test_journalist_key(File, Sudo): +def test_securedrop_application_test_journalist_key(host): """ Ensure the SecureDrop Application GPG public key file is present. This is a test-only pubkey provided in the repository strictly for testing. """ - pubkey_file = File("{}/test_journalist_key.pub".format( + pubkey_file = host.file("{}/test_journalist_key.pub".format( securedrop_test_vars.securedrop_data)) - # Sudo is only necessary when testing against app hosts, since the + # sudo is only necessary when testing against app hosts, since the # permissions are tighter. Let's elevate privileges so we're sure # we can read the correct file attributes and test them. - with Sudo(): + with host.sudo(): assert pubkey_file.is_file assert pubkey_file.user == "root" assert pubkey_file.group == "root" @@ -64,9 +64,9 @@ def test_securedrop_application_test_journalist_key(File, Sudo): # Let's make sure the corresponding fingerprint is specified # in the SecureDrop app configuration. 
- securedrop_config = File("{}/config.py".format( + securedrop_config = host.file("{}/config.py".format( securedrop_test_vars.securedrop_code)) - with Sudo(): + with host.sudo(): assert securedrop_config.is_file assert securedrop_config.user == \ securedrop_test_vars.securedrop_user @@ -77,15 +77,15 @@ def test_securedrop_application_test_journalist_key(File, Sudo): "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$") -def test_securedrop_application_sqlite_db(File, Sudo): +def test_securedrop_application_sqlite_db(host): """ Ensure sqlite database exists for application. The database file should be created by Ansible on first run. """ - # Sudo is necessary under the App hosts, which have restrictive file + # sudo is necessary under the App hosts, which have restrictive file # permissions on the doc root. Not technically necessary under dev host. - with Sudo(): - f = File("{}/db.sqlite".format(securedrop_test_vars.securedrop_data)) + with host.sudo(): + f = host.file("{}/db.sqlite".format(securedrop_test_vars.securedrop_data)) assert f.is_file assert f.user == securedrop_test_vars.securedrop_user assert f.group == securedrop_test_vars.securedrop_user diff --git a/molecule/testinfra/staging/app-code/test_xvfb.py b/molecule/testinfra/staging/app-code/test_xvfb.py --- a/molecule/testinfra/staging/app-code/test_xvfb.py +++ b/molecule/testinfra/staging/app-code/test_xvfb.py @@ -1,40 +1,35 @@ testinfra_hosts = ["app-staging"] -def test_xvfb_is_installed(Package): +def test_xvfb_is_installed(host): """ Ensure apt requirements for Xvfb are present. """ - assert Package("xvfb").is_installed + assert host.package("xvfb").is_installed -def test_firefox_is_installed(Package, Command): +def test_firefox_is_installed(host): """ The app test suite requires a very specific version of Firefox, for compatibility with Selenium. Make sure to check the explicit version of Firefox, not just that any version of Firefox is installed. """ - p = Package("firefox") + p = host.package("firefox") assert p.is_installed - c = Command("firefox --version") + c = host.run("firefox --version") # Reminder: the rstrip is only necessary for local-context actions, # but it's a fine practice in all contexts. assert c.stdout.rstrip() == "Mozilla Firefox 46.0.1" -def test_xvfb_service_config_trusty(host): +def _xvfb_service_config_trusty(host): """ Ensure xvfb service configuration file is present. - Using Sudo context manager because the expected mode is 700. + Using sudo context manager because the expected mode is 700. Not sure it's really necessary to have this script by 700; 755 sounds sufficient. """ - # We're checking the upstart/sysv-style init script, which is only - # relevant for Trusty. - if host.system_info.codename == "xenial": - return True - with host.sudo(): f = host.file("/etc/init.d/xvfb") assert f.is_file @@ -79,20 +74,49 @@ def test_xvfb_service_config_trusty(host): assert f.content.rstrip() == xvfb_init_content -def test_xvfb_service_enabled_trusty(host): +def _xvfb_service_config_xenial(host): + """ + Validate the service config for xvfb under Xenial, using + systemd unit files. 
+ """ + f = host.file("/etc/systemd/system/xvfb.service") + assert f.exists + xvfb_systemd_config = """ +[Unit] +Description=X Virtual Frame Buffer Service +After=network.target + +[Service] +ExecStart=/usr/bin/Xvfb :1 -screen 0 1024x768x24 -ac +extension GLX +render -noreset + +[Install] +WantedBy=multi-user.target +""".lstrip().rstrip() + assert f.content.rstrip() == xvfb_systemd_config + + +def test_xvfb_service_config(host): + """ + Validate the service config for Xvfb. + + Calls separate functions per platform, to accommodate for init + script system divergence on e.g. Trusty & Xenial. + """ + if host.system_info.codename == "trusty": + _xvfb_service_config_trusty(host) + else: + _xvfb_service_config_xenial(host) + + +def _xvfb_service_enabled_trusty(host): """ Ensure xvfb is configured to start on boot via update-rc.d. The `-n` option to update-rc.d is dry-run. - Using Sudo context manager because the service file is mode 700. + Using sudo context manager because the service file is mode 700. Not sure it's really necessary to have this script by 700; 755 sounds sufficient. """ - # We're checking the upstart/sysv-style init script, which is only - # relevant for Trusty. - if host.system_info.codename == "xenial": - return True - with host.sudo(): c = host.command('update-rc.d -n xvfb defaults') assert c.rc == 0 @@ -100,12 +124,33 @@ def test_xvfb_service_enabled_trusty(host): assert wanted_text in c.stdout -def test_xvfb_display_config(File): +def _xvfb_service_enabled_xenial(host): + """ + Ensure xvfb is configured to start on boot, under Xenial. + """ + s = host.service("xvfb") + assert s.is_enabled + + +def test_xvfb_service_enabled(host): + """ + Ensure the xvfb service is configured to start on boot. + + Calls separate functions per platform, to accommodate upstart vs + sysv style init scripts. + """ + if host.system_info.codename == "trusty": + _xvfb_service_enabled_trusty(host) + else: + _xvfb_service_enabled_xenial(host) + + +def test_xvfb_display_config(host): """ Ensure DISPLAY environment variable is set on boot, for running headless tests via Xvfb. """ - f = File('/etc/profile.d/xvfb_display.sh') + f = host.file('/etc/profile.d/xvfb_display.sh') assert f.is_file assert oct(f.mode) == "0444" assert f.user == "root" @@ -113,20 +158,36 @@ def test_xvfb_display_config(File): assert f.contains("export DISPLAY=:1\n") -def test_xvfb_service_running_trusty(host): +def test_xvfb_service_running(host): """ Ensure that xvfb service is running. + Calls separate functions per platform, to accommodate for upstart/sysv + style init scripts. + """ + if host.system_info.codename == "trusty": + _xvfb_service_running_trusty(host) + else: + _xvfb_service_running_xenial(host) + + +def _xvfb_service_running_xenial(host): + """ + Ensure xvfb is running under Xenial. + """ + s = host.service("xvfb") + assert s.is_running + + +def _xvfb_service_running_trusty(host): + """ + Ensure xvfb is running under Trusty. + We can't use the Service module because it expects a "status" subcommand for the init script, and our custom version doesn't have one. So let's make sure the process is running. """ - # We're checking the upstart/sysv-style init script, which is only - # relevant for Trusty. - if host.system_info.codename == "xenial": - return True - - # Sudo isn't necessary to read out of /proc on development, but is + # sudo isn't necessary to read out of /proc on development, but is # required when running under Grsecurity, which app-staging does. 
# So let's escalate privileges to ensure we can determine service state. with host.sudo(): diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -23,11 +23,11 @@ # Test is not DRY; haven't figured out how to parametrize on # multiple inputs, so explicitly redeclaring test logic. @pytest.mark.parametrize("header", wanted_apache_headers) -def test_apache_headers_journalist_interface(File, header): +def test_apache_headers_journalist_interface(host, header): """ Test for expected headers in Document Interface vhost config. """ - f = File("/etc/apache2/sites-available/journalist.conf") + f = host.file("/etc/apache2/sites-available/journalist.conf") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -104,7 +104,7 @@ def test_apache_headers_journalist_interface(File, header): 'ErrorLog /var/log/apache2/journalist-error.log', 'CustomLog /var/log/apache2/journalist-access.log combined', ]) -def test_apache_config_journalist_interface(File, apache_opt): +def test_apache_config_journalist_interface(host, apache_opt): """ Ensure the necessary Apache settings for serving the application are in place. Some values will change according to the host, @@ -113,7 +113,7 @@ def test_apache_config_journalist_interface(File, apache_opt): These checks apply only to the Document Interface, used by Journalists. """ - f = File("/etc/apache2/sites-available/journalist.conf") + f = host.file("/etc/apache2/sites-available/journalist.conf") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -122,16 +122,16 @@ def test_apache_config_journalist_interface(File, apache_opt): assert re.search(regex, f.content, re.M) -def test_apache_journalist_interface_vhost(File): +def test_apache_journalist_interface_vhost(host): """ Ensure the document root is configured with correct access restrictions for serving Journalist Interface application code. """ - f = File("/etc/apache2/sites-available/journalist.conf") + f = host.file("/etc/apache2/sites-available/journalist.conf") assert common_apache2_directory_declarations in f.content -def test_apache_logging_journalist_interface(File, Command, Sudo): +def test_apache_logging_journalist_interface(host): """ Check that logging is configured correctly for the Journalist Interface. The actions of Journalists are logged by the system, so that an Admin can @@ -140,15 +140,15 @@ def test_apache_logging_journalist_interface(File, Command, Sudo): Logs were broken for some period of time, logging only "combined" to the logfile, rather than the combined LogFormat intended. """ - # Sudo is necessary because /var/log/apache2 is mode 0750. - with Sudo(): - f = File("/var/log/apache2/journalist-access.log") + # sudo is necessary because /var/log/apache2 is mode 0750. + with host.sudo(): + f = host.file("/var/log/apache2/journalist-access.log") assert f.is_file if f.size == 0: # If the file is empty, the Journalist Interface hasn't been used # yet, so make a quick GET request local to the host so we can # validate the log entry. - Command.check_output("curl http://127.0.0.1:8080") + host.check_output("curl http://127.0.0.1:8080") assert f.size > 0 # Make sure something was logged. 
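The curl step above is a small self-priming pattern: if the log is empty because the interface has not been exercised yet, generate one local request and then assert on the log. A standalone sketch of the same idea, with the port and log path as illustrative assumptions rather than guaranteed values:

def test_access_log_grows_after_request(host):
    with host.sudo():
        log = host.file("/var/log/apache2/journalist-access.log")
        size_before = log.size if log.exists else 0
        # A single local request should append at least one entry.
        host.check_output("curl -s -o /dev/null http://127.0.0.1:8080")
        assert log.size > 0
        assert log.size >= size_before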
# LogFormat declaration was missing, so track regressions that log diff --git a/molecule/testinfra/staging/app/apache/test_apache_service.py b/molecule/testinfra/staging/app/apache/test_apache_service.py --- a/molecule/testinfra/staging/app/apache/test_apache_service.py +++ b/molecule/testinfra/staging/app/apache/test_apache_service.py @@ -9,12 +9,12 @@ "source", "journalist", ]) -def test_apache_enabled_sites(Command, Sudo, apache_site): +def test_apache_enabled_sites(host, apache_site): """ Ensure the Source and Journalist interfaces are enabled. """ - with Sudo(): - c = Command("/usr/sbin/a2query -s {}".format(apache_site)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -s {}".format(apache_site)) assert "{} (enabled".format(apache_site) in c.stdout assert c.rc == 0 @@ -22,32 +22,32 @@ def test_apache_enabled_sites(Command, Sudo, apache_site): @pytest.mark.parametrize("apache_site", [ "000-default", ]) -def test_apache_disabled_sites(Command, apache_site): +def test_apache_disabled_sites(host, apache_site): """ Ensure the default HTML document root is disabled. """ - c = Command("a2query -s {}".format(apache_site)) + c = host.run("a2query -s {}".format(apache_site)) assert "No site matches {} (disabled".format(apache_site) in c.stderr assert c.rc == 32 -def test_apache_service(Service, Sudo): +def test_apache_service(host): """ Ensure Apache service is running. """ - # Sudo is necessary to run `service apache2 status`, otherwise + # sudo is necessary to run `service apache2 status`, otherwise # the service is falsely reported as not running. - with Sudo(): - s = Service("apache2") + with host.sudo(): + s = host.service("apache2") assert s.is_running assert s.is_enabled -def test_apache_user(User): +def test_apache_user(host): """ Ensure user account for running application code is configured correctly. """ - u = User("www-data") + u = host.user("www-data") assert u.exists assert u.home == "/var/www" assert u.shell == "/usr/sbin/nologin" @@ -57,14 +57,14 @@ def test_apache_user(User): "80", "8080", ]) -def test_apache_listening(Socket, Sudo, port): +def test_apache_listening(host, port): """ Ensure Apache is listening on proper ports and interfaces. In staging, expect the service to be bound to 0.0.0.0, but in prod, it should be restricted to 127.0.0.1. """ - # Sudo is necessary to read from /proc/net/tcp. - with Sudo(): - s = Socket("tcp://{}:{}".format( + # sudo is necessary to read from /proc/net/tcp. + with host.sudo(): + s = host.socket("tcp://{}:{}".format( securedrop_test_vars.apache_listening_address, port)) assert s.is_listening diff --git a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py @@ -6,11 +6,11 @@ @pytest.mark.parametrize("header", securedrop_test_vars.wanted_apache_headers) -def test_apache_headers_source_interface(File, header): +def test_apache_headers_source_interface(host, header): """ Test for expected headers in Source Interface vhost config. 
""" - f = File("/etc/apache2/sites-available/source.conf") + f = host.file("/etc/apache2/sites-available/source.conf") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -46,7 +46,7 @@ def test_apache_headers_source_interface(File, header): 'ErrorDocument 500 /notfound', "ErrorLog {}".format(securedrop_test_vars.apache_source_log), ]) -def test_apache_config_source_interface(File, apache_opt): +def test_apache_config_source_interface(host, apache_opt): """ Ensure the necessary Apache settings for serving the application are in place. Some values will change according to the host, @@ -55,7 +55,7 @@ def test_apache_config_source_interface(File, apache_opt): These checks apply only to the Source Interface, used by Sources. """ - f = File("/etc/apache2/sites-available/source.conf") + f = host.file("/etc/apache2/sites-available/source.conf") assert f.is_file assert f.user == "root" assert f.group == "root" diff --git a/molecule/testinfra/staging/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/molecule/testinfra/staging/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -9,31 +9,31 @@ "libapache2-mod-wsgi", "libapache2-mod-xsendfile", ]) -def test_apache_apt_packages(Package, package): +def test_apache_apt_packages(host, package): """ Ensure required Apache packages are installed. """ - assert Package(package).is_installed + assert host.package(package).is_installed -def test_apache_apt_packages_trusty(Package, SystemInfo): +def test_apache_apt_packages_trusty(host): """ Ensure required Apache packages are installed. Only checks Trusty-specific packages; other tests handle more general apt dependencies for Apache. """ # Skip if testing against Xenial - if SystemInfo.release != "trusty": + if host.system_info.codename == "xenial": return True - assert Package("apache2-mpm-worker").is_installed + assert host.package("apache2-mpm-worker").is_installed -def test_apache_security_config_deprecated(File): +def test_apache_security_config_deprecated(host): """ Ensure that /etc/apache2/security is absent, since it was setting redundant options already presentin /etc/apache2/apache2.conf. See #643 for discussion. """ - assert not File("/etc/apache2/security").exists + assert not host.file("/etc/apache2/security").exists @pytest.mark.parametrize("apache_opt", [ @@ -58,14 +58,14 @@ def test_apache_security_config_deprecated(File): 'ServerSignature Off', 'TraceEnable Off', ]) -def test_apache_config_settings(File, apache_opt): +def test_apache_config_settings(host, apache_opt): """ Check required Apache config settings for general server. These checks do not target individual interfaces, e.g. Source versus Document Interface, and instead apply to Apache more generally. """ - f = File("/etc/apache2/apache2.conf") + f = host.file("/etc/apache2/apache2.conf") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -77,7 +77,7 @@ def test_apache_config_settings(File, apache_opt): "80", "8080", ]) -def test_apache_ports_config(File, SystemInfo, port): +def test_apache_ports_config(host, port): """ Ensure Apache ports config items, which specify how the Source and Document Interfaces are configured to be served @@ -85,7 +85,7 @@ def test_apache_ports_config(File, SystemInfo, port): to permit port forwarding for local testing, but in production, they're restricted to localhost, for use over Tor. 
""" - f = File("/etc/apache2/ports.conf") + f = host.file("/etc/apache2/ports.conf") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -115,14 +115,14 @@ def test_apache_ports_config(File, SystemInfo, port): 'wsgi', 'xsendfile', ]) -def test_apache_modules_present(Command, Sudo, apache_module): +def test_apache_modules_present(host, apache_module): """ Ensure presence of required Apache modules. Application will not work correctly if these are missing. A separate test will check for disabled modules. """ - with Sudo(): - c = Command("/usr/sbin/a2query -m {}".format(apache_module)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -m {}".format(apache_module)) assert "{} (enabled".format(apache_module) in c.stdout assert c.rc == 0 @@ -134,14 +134,14 @@ def test_apache_modules_present(Command, Sudo, apache_module): 'env', 'status', ]) -def test_apache_modules_absent(Command, Sudo, apache_module): +def test_apache_modules_absent(host, apache_module): """ Ensure absence of unwanted Apache modules. Application does not require these modules, so they should be disabled to reduce attack surface. A separate test will check for disabled modules. """ - with Sudo(): - c = Command("/usr/sbin/a2query -m {}".format(apache_module)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -m {}".format(apache_module)) assert "No module matches {} (disabled".format(apache_module) in \ c.stderr assert c.rc == 32 @@ -149,7 +149,7 @@ def test_apache_modules_absent(Command, Sudo, apache_module): @pytest.mark.parametrize("logfile", securedrop_test_vars.allowed_apache_logfiles) -def test_apache_logfiles_present(File, Command, Sudo, logfile): +def test_apache_logfiles_present(host, logfile): """" Ensure that whitelisted Apache log files for the Source and Journalist Interfaces are present. In staging, we permit a "source-error" log, @@ -158,13 +158,13 @@ def test_apache_logfiles_present(File, Command, Sudo, logfile): Apache log directory. """ # We need elevated privileges to read files inside /var/log/apache2 - with Sudo(): - f = File(logfile) + with host.sudo(): + f = host.file(logfile) assert f.is_file assert f.user == "root" -def test_apache_logfiles_no_extras(Command, Sudo): +def test_apache_logfiles_no_extras(host): """ Ensure that no unwanted Apache logfiles are present. Complements the `test_apache_logfiles_present` config test. Here, we confirm that the @@ -172,7 +172,7 @@ def test_apache_logfiles_no_extras(Command, Sudo): on the Application Server, whether staging or prod. 
""" # We need elevated privileges to read files inside /var/log/apache2 - with Sudo(): - c = Command("find /var/log/apache2 -mindepth 1 | wc -l") + with host.sudo(): + c = host.run("find /var/log/apache2 -mindepth 1 | wc -l") assert int(c.stdout) == \ len(securedrop_test_vars.allowed_apache_logfiles) diff --git a/molecule/testinfra/staging/app/test_app_network.py b/molecule/testinfra/staging/app/test_app_network.py --- a/molecule/testinfra/staging/app/test_app_network.py +++ b/molecule/testinfra/staging/app/test_app_network.py @@ -9,16 +9,16 @@ securedrop_test_vars = pytest.securedrop_test_vars -def test_app_iptables_rules(SystemInfo, Command, Sudo): +def test_app_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison kwargs = dict( mon_ip=os.environ.get('MON_IP', securedrop_test_vars.mon_ip), - default_interface=Command.check_output("ip r | head -n 1 | " - "awk '{ print $5 }'"), - tor_user_id=Command.check_output("id -u debian-tor"), - securedrop_user_id=Command.check_output("id -u www-data"), - ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"), + default_interface=host.check_output("ip r | head -n 1 | " + "awk '{ print $5 }'"), + tor_user_id=host.check_output("id -u debian-tor"), + securedrop_user_id=host.check_output("id -u www-data"), + ssh_group_gid=host.check_output("getent group ssh | cut -d: -f3"), dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters @@ -32,9 +32,9 @@ def test_app_iptables_rules(SystemInfo, Command, Sudo): jinja_iptables = Template(io.open(iptables_file, 'r').read()) iptables_expected = jinja_iptables.render(**kwargs) - with Sudo(): + with host.sudo(): # Actually run the iptables scrape command - iptables = Command.check_output(iptables) + iptables = host.check_output(iptables) # print diff comparison (only shows up in pytests if test fails or # verbosity turned way up) for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'), diff --git a/molecule/testinfra/staging/app/test_apparmor.py b/molecule/testinfra/staging/app/test_apparmor.py --- a/molecule/testinfra/staging/app/test_apparmor.py +++ b/molecule/testinfra/staging/app/test_apparmor.py @@ -6,37 +6,37 @@ @pytest.mark.parametrize('pkg', ['apparmor', 'apparmor-utils']) -def test_apparmor_pkg(Package, pkg): +def test_apparmor_pkg(host, pkg): """ Apparmor package dependencies """ - assert Package(pkg).is_installed + assert host.package(pkg).is_installed -def test_apparmor_enabled(Command, Sudo): +def test_apparmor_enabled(host): """ Check that apparmor is enabled """ - with Sudo(): - assert Command("aa-status --enabled").rc == 0 + with host.sudo(): + assert host.run("aa-status --enabled").rc == 0 apache2_capabilities = [ - 'dac_override', - 'kill', - 'net_bind_service', - 'sys_ptrace' - ] + 'dac_override', + 'kill', + 'net_bind_service', + 'sys_ptrace' +] @pytest.mark.parametrize('cap', apache2_capabilities) -def test_apparmor_apache_capabilities(Command, cap): +def test_apparmor_apache_capabilities(host, cap): """ check for exact list of expected app-armor capabilities for apache2 """ - c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' " - "/etc/apparmor.d/usr.sbin.apache2") + c = host.run("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' " + "/etc/apparmor.d/usr.sbin.apache2") assert cap in c.stdout -def test_apparmor_apache_exact_capabilities(Command): +def test_apparmor_apache_exact_capabilities(host): """ ensure no extra capabilities are defined for apache2 """ - c = 
Command.check_output("grep -ic capability " - "/etc/apparmor.d/usr.sbin.apache2") + c = host.check_output("grep -ic capability " + "/etc/apparmor.d/usr.sbin.apache2") assert str(len(apache2_capabilities)) == c @@ -44,74 +44,72 @@ def test_apparmor_apache_exact_capabilities(Command): @pytest.mark.parametrize('cap', tor_capabilities) -def test_apparmor_tor_capabilities(Command, cap): +def test_apparmor_tor_capabilities(host, cap): """ check for exact list of expected app-armor capabilities for tor """ - c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && " - "say $1\' /etc/apparmor.d/usr.sbin.tor") + c = host.run("perl -nE \'/^\s+capability\s+(\w+),$/ && " + "say $1\' /etc/apparmor.d/usr.sbin.tor") assert cap in c.stdout -def test_apparmor_tor_exact_capabilities(Command): +def test_apparmor_tor_exact_capabilities(host): """ ensure no extra capabilities are defined for tor """ - c = Command.check_output("grep -ic capability " - "/etc/apparmor.d/usr.sbin.tor") + c = host.check_output("grep -ic capability " + "/etc/apparmor.d/usr.sbin.tor") assert str(len(tor_capabilities)) == c -enforced_profiles = [ - 'ntpd', - 'apache2', - 'tcpdump', - 'tor'] - - [email protected]('profile', enforced_profiles) -def test_apparmor_ensure_not_disabled(File, Sudo, profile): - """ Explicitly check that enforced profiles are NOT in - /etc/apparmor.d/disable - Polling aa-status only checks the last config that was loaded, - this ensures it wont be disabled on reboot. [email protected]('profile', [ + 'ntpd', + 'apache2', + 'tcpdump', + 'tor', +]) +def test_apparmor_ensure_not_disabled(host, profile): """ - f = File("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile)) - with Sudo(): + Explicitly check that enforced profiles are NOT in /etc/apparmor.d/disable + Polling aa-status only checks the last config that was loaded, + this ensures it wont be disabled on reboot. + """ + f = host.file("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile)) + with host.sudo(): assert not f.exists @pytest.mark.parametrize('complain_pkg', sdvars.apparmor_complain) -def test_app_apparmor_complain(Command, Sudo, complain_pkg): +def test_app_apparmor_complain(host, complain_pkg): """ Ensure app-armor profiles are in complain mode for staging """ - with Sudo(): + with host.sudo(): awk = ("awk '/[0-9]+ profiles.*complain." 
"/{flag=1;next}/^[0-9]+.*/{flag=0}flag'") - c = Command.check_output("aa-status | {}".format(awk)) + c = host.check_output("aa-status | {}".format(awk)) assert complain_pkg in c -def test_app_apparmor_complain_count(Command, Sudo): +def test_app_apparmor_complain_count(host): """ Ensure right number of app-armor profiles are in complain mode """ - with Sudo(): - c = Command.check_output("aa-status --complaining") + with host.sudo(): + c = host.check_output("aa-status --complaining") assert c == str(len(sdvars.apparmor_complain)) @pytest.mark.parametrize('aa_enforced', sdvars.apparmor_enforce) -def test_apparmor_enforced(Command, Sudo, aa_enforced): +def test_apparmor_enforced(host, aa_enforced): awk = ("awk '/[0-9]+ profiles.*enforce./" "{flag=1;next}/^[0-9]+.*/{flag=0}flag'") - with Sudo(): - c = Command.check_output("aa-status | {}".format(awk)) + with host.sudo(): + c = host.check_output("aa-status | {}".format(awk)) assert aa_enforced in c -def test_apparmor_total_profiles(Command, Sudo): +def test_apparmor_total_profiles(host): """ Ensure number of total profiles is sum of enforced and complaining profiles """ - with Sudo(): + with host.sudo(): total_expected = str((len(sdvars.apparmor_enforce) + len(sdvars.apparmor_complain))) # Trusty has ~10, Xenial about ~20 profiles, so let's expect # *at least* the sum. - assert Command.check_output("aa-status --profiled") >= total_expected + assert host.check_output("aa-status --profiled") >= total_expected def test_aastatus_unconfined(host): @@ -132,8 +130,8 @@ def test_aastatus_unconfined(host): assert unconfined_chk in aa_status_output -def test_aa_no_denies_in_syslog(host, File, Sudo): +def test_aa_no_denies_in_syslog(host): """ Ensure that there are no apparmor denials in syslog """ - with Sudo(): - f = File("/var/log/syslog") + with host.sudo(): + f = host.file("/var/log/syslog") assert 'apparmor="DENIED"' not in f.content_string diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py --- a/molecule/testinfra/staging/app/test_appenv.py +++ b/molecule/testinfra/staging/app/test_appenv.py @@ -5,16 +5,16 @@ @pytest.mark.parametrize('exp_pip_pkg', sdvars.pip_deps) -def test_app_pip_deps(PipPackage, exp_pip_pkg): +def test_app_pip_deps(host, exp_pip_pkg): """ Ensure pip dependencies are installed """ - pip = PipPackage.get_packages() + pip = host.pip_package.get_packages() assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version'] -def test_app_wsgi(File, Sudo): +def test_app_wsgi(host): """ ensure logging is enabled for source interface in staging """ - f = File("/var/www/source.wsgi") - with Sudo(): + f = host.file("/var/www/source.wsgi") + with host.sudo(): assert f.is_file assert oct(f.mode) == "0640" assert f.user == 'www-data' @@ -23,58 +23,57 @@ def test_app_wsgi(File, Sudo): assert f.contains("^logging\.basicConfig(stream=sys\.stderr)$") -def test_pidfile(File): +def test_pidfile(host): """ ensure there are no pid files """ - assert not File('/tmp/journalist.pid').exists - assert not File('/tmp/source.pid').exists + assert not host.file('/tmp/journalist.pid').exists + assert not host.file('/tmp/source.pid').exists @pytest.mark.parametrize('app_dir', sdvars.app_directories) -def test_app_directories(File, Sudo, app_dir): +def test_app_directories(host, app_dir): """ ensure securedrop app directories exist with correct permissions """ - f = File(app_dir) - with Sudo(): + f = host.file(app_dir) + with host.sudo(): assert f.is_directory assert f.user == sdvars.securedrop_user 
assert f.group == sdvars.securedrop_user assert oct(f.mode) == "0700" -def test_app_code_pkg(Package): +def test_app_code_pkg(host): """ ensure securedrop-app-code package is installed """ - assert Package("securedrop-app-code").is_installed + assert host.package("securedrop-app-code").is_installed -def test_gpg_key_in_keyring(Command, Sudo): +def test_gpg_key_in_keyring(host): """ ensure test gpg key is present in app keyring """ - with Sudo(sdvars.securedrop_user): - c = Command("gpg --homedir /var/lib/securedrop/keys " - "--list-keys 28271441") + with host.sudo(sdvars.securedrop_user): + c = host.run("gpg --homedir /var/lib/securedrop/keys " + "--list-keys 28271441") assert "pub 4096R/28271441 2013-10-12" in c.stdout -def test_ensure_logo(File, Sudo): +def test_ensure_logo(host): """ ensure default logo header file exists """ - f = File("{}/static/i/logo.png".format(sdvars.securedrop_code)) - with Sudo(): + f = host.file("{}/static/i/logo.png".format(sdvars.securedrop_code)) + with host.sudo(): assert oct(f.mode) == "0644" assert f.user == sdvars.securedrop_user assert f.group == sdvars.securedrop_user -def test_securedrop_tmp_clean_cron(Command, Sudo): +def test_securedrop_tmp_clean_cron(host): """ Ensure securedrop tmp clean cron job in place """ - with Sudo(): - cronlist = Command("crontab -l").stdout - cronjob = "@daily {}/manage.py clean-tmp".format( - sdvars.securedrop_code) + with host.sudo(): + cronlist = host.run("crontab -l").stdout + cronjob = "@daily {}/manage.py clean-tmp".format(sdvars.securedrop_code) assert cronjob in cronlist -def test_app_workerlog_dir(File, Sudo): +def test_app_workerlog_dir(host): """ ensure directory for worker logs is present """ - f = File('/var/log/securedrop_worker') - with Sudo(): + f = host.file('/var/log/securedrop_worker') + with host.sudo(): assert f.is_directory assert f.user == "root" assert f.group == "root" diff --git a/molecule/testinfra/staging/app/test_ossec_agent.py b/molecule/testinfra/staging/app/test_ossec_agent.py --- a/molecule/testinfra/staging/app/test_ossec_agent.py +++ b/molecule/testinfra/staging/app/test_ossec_agent.py @@ -6,9 +6,9 @@ testinfra_hosts = ["app", "app-staging"] -def test_hosts_files(File, SystemInfo): +def test_hosts_files(host): """ Ensure host files mapping are in place """ - f = File('/etc/hosts') + f = host.file('/etc/hosts') mon_ip = os.environ.get('MON_IP', sdvars.mon_ip) mon_host = sdvars.monitor_hostname @@ -19,27 +19,27 @@ def test_hosts_files(File, SystemInfo): mon_host)) -def test_hosts_duplicate(Command): +def test_hosts_duplicate(host): """ Regression test for duplicate entries """ - assert Command.check_output("uniq --repeated /etc/hosts") == "" + assert host.check_output("uniq --repeated /etc/hosts") == "" -def test_ossec_agent_installed(Package): +def test_ossec_agent_installed(host): """ Check that ossec-agent package is present """ - assert Package("securedrop-ossec-agent").is_installed + assert host.package("securedrop-ossec-agent").is_installed # Permissions don't match between Ansible and OSSEC deb packages postinst. 
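One detail from the gpg keyring check above deserves a note: host.sudo() accepts an optional username, so a command can run as the application account rather than root. A minimal sketch of that form, with the account name as an illustrative assumption:

def test_command_runs_as_app_user(host):
    # Passing a username to sudo() drops to that account instead of root.
    with host.sudo("www-data"):
        assert host.check_output("whoami") == "www-data"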
@pytest.mark.xfail -def test_ossec_keyfile_present(File, Command, Sudo, SystemInfo): +def test_ossec_keyfile_present(host): """ ensure client keyfile for ossec-agent is present """ pattern = "^1024 {} {} [0-9a-f]{{64}}$".format( sdvars.app_hostname, os.environ.get('APP_IP', sdvars.app_ip)) regex = re.compile(pattern) - with Sudo(): - f = File("/var/ossec/etc/client.keys") + with host.sudo(): + f = host.file("/var/ossec/etc/client.keys") assert f.exists assert oct(f.mode) == "0644" assert f.user == "root" diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -16,7 +16,7 @@ def test_tor_packages(Package, package): assert Package(package).is_installed -def test_tor_service_running_trusty(host): +def _tor_service_running_trusty(host): """ Ensure tor is running and enabled. Tor is required for SSH access, so it must be enabled to start on boot. Checks upstart/sysv-style @@ -27,9 +27,6 @@ def test_tor_service_running_trusty(host): # `initctl` command. The tor service is handled via a SysV-style init # script, so let's just shell out and verify the running and enabled # states explicitly. - if host.system_info.codename == "xenial": - return True - with host.sudo(): assert host.check_output("service tor status") == \ " * tor is running" @@ -45,31 +42,37 @@ def test_tor_service_running_trusty(host): assert t.linked_to == "/etc/init.d/tor" -def test_tor_service_running_xenial(host): +def _tor_service_running_xenial(host): """ Ensure tor is running and enabled. Tor is required for SSH access, so it must be enabled to start on boot. Checks systemd-style services, used by Xenial. """ - # TestInfra tries determine the service manager intelligently, and - # inappropriately assumes Upstart on Trusty, due to presence of the - # `initctl` command. The tor service is handled via a SysV-style init - # script, so let's just shell out and verify the running and enabled - # states explicitly. - if host.system_info.codename == "trusty": - return True - s = host.service("tor") assert s.is_running assert s.is_enabled +def test_tor_service_running(host): + """ + Ensure tor is running and enabled. Tor is required for SSH access, + so it must be enabled to start on boot. + + Calls a separate function depending on platform, to handle nuances + of upstart vs sysv init systems. + """ + if host.system_info.codename == "trusty": + _tor_service_running_trusty(host) + else: + _tor_service_running_xenial(host) + + @pytest.mark.parametrize('torrc_option', [ 'SocksPort 0', 'SafeLogging 1', 'RunAsDaemon 1', ]) -def test_tor_torrc_options(File, torrc_option): +def test_tor_torrc_options(host, torrc_option): """ Check for required options in the system Tor config file. These options should be present regardless of machine role, @@ -77,21 +80,21 @@ def test_tor_torrc_options(File, torrc_option): Separate tests will check for specific hidden services. """ - f = File("/etc/tor/torrc") + f = host.file("/etc/tor/torrc") assert f.is_file assert f.user == "debian-tor" assert oct(f.mode) == "0644" assert f.contains("^{}$".format(torrc_option)) -def test_tor_torrc_sandbox(File): +def test_tor_torrc_sandbox(host): """ Check that the `Sandbox 1` declaration is not present in the torrc. The torrc manpage states this option is experimental, and although we use it already on Tails workstations, further testing is required before we push it out to servers. 
    See issues #944 and #1969.
    """
-    f = File("/etc/tor/torrc")
+    f = host.file("/etc/tor/torrc")
     # Only `Sandbox 1` will enable, but make sure there are zero occurrences
     # of "Sandbox", otherwise we may have a regression somewhere.
     assert not f.contains("^.*Sandbox.*$")
diff --git a/molecule/testinfra/staging/app/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py
--- a/molecule/testinfra/staging/app/test_tor_hidden_services.py
+++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py
@@ -7,12 +7,12 @@


 @pytest.mark.parametrize('tor_service', sdvars.tor_services)
-def test_tor_service_directories(File, Sudo, tor_service):
+def test_tor_service_directories(host, tor_service):
     """
     Check mode and ownership on Tor service directories.
     """
-    with Sudo():
-        f = File("/var/lib/tor/services/{}".format(tor_service['name']))
+    with host.sudo():
+        f = host.file("/var/lib/tor/services/{}".format(tor_service['name']))
         assert f.is_directory
         assert oct(f.mode) == "0700"
         assert f.user == "debian-tor"
@@ -20,19 +20,18 @@


 @pytest.mark.parametrize('tor_service', sdvars.tor_services)
-def test_tor_service_hostnames(File, Sudo, tor_service):
+def test_tor_service_hostnames(host, tor_service):
     """
     Check contents of tor service hostname file. For normal Hidden Services,
     the file should contain only hostname (.onion URL). For Authenticated
     Hidden Services, it should also contain the HidServAuth cookie.
     """
-
     # Declare regex only for THS; we'll build regex for ATHS only if
     # necessary, since we won't have the required values otherwise.
     ths_hostname_regex = "[a-z0-9]{16}\.onion"

-    with Sudo():
-        f = File("/var/lib/tor/services/{}/hostname".format(
+    with host.sudo():
+        f = host.file("/var/lib/tor/services/{}/hostname".format(
             tor_service['name']))
         assert f.is_file
         assert oct(f.mode) == "0600"
diff --git a/molecule/testinfra/staging/common/test_cron_apt.py b/molecule/testinfra/staging/common/test_cron_apt.py
--- a/molecule/testinfra/staging/common/test_cron_apt.py
+++ b/molecule/testinfra/staging/common/test_cron_apt.py
@@ -9,7 +9,7 @@
     'cron-apt',
     'ntp'
 ])
-def test_cron_apt_dependencies(Package, dependency):
+def test_cron_apt_dependencies(host, dependency):
     """
     Ensure critical packages are installed. If any of these are missing,
     the system will fail to receive automatic updates.
@@ -20,14 +20,14 @@
     problematic. With better procedures in place regarding apt repo
     maintenance, we can ensure the field is populated going forward.
     """
-    assert Package(dependency).is_installed
+    assert host.package(dependency).is_installed


-def test_cron_apt_config(File):
+def test_cron_apt_config(host):
     """
     Ensure custom cron-apt config file is present.
     """
-    f = File('/etc/cron-apt/config')
+    f = host.file('/etc/cron-apt/config')
     assert f.is_file
     assert f.user == "root"
     assert oct(f.mode) == "0644"
@@ -59,12 +59,12 @@
     assert f.contains(repo_regex)


-def test_cron_apt_repo_config_update(File):
+def test_cron_apt_repo_config_update(host):
     """
     Ensure cron-apt updates repos from the security.list config.
""" - f = File('/etc/cron-apt/action.d/0-update') + f = host.file('/etc/cron-apt/action.d/0-update') assert f.is_file assert f.user == "root" assert oct(f.mode) == "0644" @@ -74,12 +74,12 @@ def test_cron_apt_repo_config_update(File): assert f.contains('^{}$'.format(repo_config)) -def test_cron_apt_delete_vanilla_kernels(File): +def test_cron_apt_delete_vanilla_kernels(host): """ Ensure cron-apt removes generic linux image packages when installed. """ - f = File('/etc/cron-apt/action.d/9-remove') + f = host.file('/etc/cron-apt/action.d/9-remove') assert f.is_file assert f.user == "root" assert oct(f.mode) == "0644" @@ -89,11 +89,11 @@ def test_cron_apt_delete_vanilla_kernels(File): assert f.contains('^{}$'.format(command)) -def test_cron_apt_repo_config_upgrade(File): +def test_cron_apt_repo_config_upgrade(host): """ Ensure cron-apt upgrades packages from the security.list config. """ - f = File('/etc/cron-apt/action.d/5-security') + f = host.file('/etc/cron-apt/action.d/5-security') assert f.is_file assert f.user == "root" assert oct(f.mode) == "0644" @@ -105,11 +105,11 @@ def test_cron_apt_repo_config_upgrade(File): assert f.contains(re.escape(repo_config)) -def test_cron_apt_config_deprecated(File): +def test_cron_apt_config_deprecated(host): """ Ensure default cron-apt file to download all updates does not exist. """ - f = File('/etc/cron-apt/action.d/3-download') + f = host.file('/etc/cron-apt/action.d/3-download') assert not f.exists @@ -121,13 +121,13 @@ def test_cron_apt_config_deprecated(File): {'job': '0 5 * * * root /sbin/reboot', 'state': 'absent'}, ]) -def test_cron_apt_cron_jobs(File, cron_job): +def test_cron_apt_cron_jobs(host, cron_job): """ Check for correct cron job for upgrading all packages and rebooting. We'll also check for absence of previous versions of the cron job, to make sure those have been cleaned up via the playbooks. """ - f = File('/etc/cron.d/cron-apt') + f = host.file('/etc/cron.d/cron-apt') assert f.is_file assert f.user == "root" assert oct(f.mode) == "0644" @@ -139,7 +139,7 @@ def test_cron_apt_cron_jobs(File, cron_job): assert not f.contains(regex_job) -def test_cron_apt_all_packages_updated(Command): +def test_cron_apt_all_packages_updated(host): """ Ensure a safe-upgrade has already been run, by checking that no packages are eligible for upgrade currently. @@ -148,7 +148,7 @@ def test_cron_apt_all_packages_updated(Command): for use with Selenium. Therefore apt will report it's possible to upgrade Firefox, which we'll need to mark as "OK" in terms of the tests. """ - c = Command('aptitude --simulate -y safe-upgrade') + c = host.run('aptitude --simulate -y safe-upgrade') assert c.rc == 0 # Staging hosts will have locally built deb packages, marked as held. # Staging and development will have a version-locked Firefox pinned for diff --git a/molecule/testinfra/staging/common/test_fpf_apt_repo.py b/molecule/testinfra/staging/common/test_fpf_apt_repo.py --- a/molecule/testinfra/staging/common/test_fpf_apt_repo.py +++ b/molecule/testinfra/staging/common/test_fpf_apt_repo.py @@ -32,7 +32,7 @@ def test_fpf_apt_repo_present(host): assert f.contains(repo_regex) -def test_fpf_apt_repo_fingerprint(Command): +def test_fpf_apt_repo_fingerprint(host): """ Ensure the FPF apt repo has the correct fingerprint on the associated signing pubkey. The key changed in October 2016, so test for the @@ -40,7 +40,7 @@ def test_fpf_apt_repo_fingerprint(Command): `securedrop-keyring` package. 
""" - c = Command('apt-key finger') + c = host.run('apt-key finger') fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg --------------------------------------------- @@ -59,12 +59,12 @@ def test_fpf_apt_repo_fingerprint(Command): 'uid Freedom of the Press Foundation Master Signing Key', 'B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818', ]) -def test_fpf_apt_repo_old_pubkeys_absent(Command, old_pubkey): +def test_fpf_apt_repo_old_pubkeys_absent(host, old_pubkey): """ Ensure that expired (or about-to-expire) public keys for the FPF apt repo are NOT present. Updates to the securedrop-keyring package should enforce clobbering of old pubkeys, and this check will confirm absence. """ - c = Command('apt-key finger') + c = host.run('apt-key finger') assert old_pubkey not in c.stdout diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -5,12 +5,12 @@ KERNEL_VERSION = pytest.securedrop_test_vars.grsec_version -def test_ssh_motd_disabled(File): +def test_ssh_motd_disabled(host): """ Ensure the SSH MOTD (Message of the Day) is disabled. Grsecurity balks at Ubuntu's default MOTD. """ - f = File("/etc/pam.d/sshd") + f = host.file("/etc/pam.d/sshd") assert f.is_file assert not f.contains("pam\.motd") @@ -21,13 +21,13 @@ def test_ssh_motd_disabled(File): 'paxctl', 'securedrop-grsec', ]) -def test_grsecurity_apt_packages(Package, package): +def test_grsecurity_apt_packages(host, package): """ Ensure the grsecurity-related apt packages are present on the system. Includes the FPF-maintained metapackage, as well as paxctl, for managing PaX flags on binaries. """ - assert Package(package).is_installed + assert host.package(package).is_installed @pytest.mark.parametrize("package", [ @@ -38,7 +38,7 @@ def test_grsecurity_apt_packages(Package, package): '^linux-image-.*generic$', '^linux-headers-.*', ]) -def test_generic_kernels_absent(Command, package): +def test_generic_kernels_absent(host, package): """ Ensure the default Ubuntu-provided kernel packages are absent. In the past, conflicting version numbers have caused machines @@ -49,28 +49,28 @@ def test_generic_kernels_absent(Command, package): # Can't use the TestInfra Package module to check state=absent, # so let's check by shelling out to `dpkg -l`. Dpkg will automatically # honor simple regex in package names. - c = Command("dpkg -l {}".format(package)) + c = host.run("dpkg -l {}".format(package)) assert c.rc == 1 error_text = "dpkg-query: no packages found matching {}".format(package) assert c.stderr == error_text -def test_grsecurity_lock_file(File): +def test_grsecurity_lock_file(host): """ Ensure system is rerunning a grsecurity kernel by testing for the `grsec_lock` file, which is automatically created by grsecurity. """ - f = File("/proc/sys/kernel/grsecurity/grsec_lock") + f = host.file("/proc/sys/kernel/grsecurity/grsec_lock") assert oct(f.mode) == "0600" assert f.user == "root" assert f.size == 0 -def test_grsecurity_kernel_is_running(Command): +def test_grsecurity_kernel_is_running(host): """ Make sure the currently running kernel is specific grsec kernel. 
""" - c = Command('uname -r') + c = host.run('uname -r') assert c.stdout.endswith('-grsec') assert c.stdout == '{}-grsec'.format(KERNEL_VERSION) @@ -80,13 +80,13 @@ def test_grsecurity_kernel_is_running(Command): ('kernel.grsecurity.rwxmap_logging', 0), ('vm.heap_stack_gap', 1048576), ]) -def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt): +def test_grsecurity_sysctl_options(host, sysctl_opt): """ Check that the grsecurity-related sysctl options are set correctly. In production the RWX logging is disabled, to reduce log noise. """ - with Sudo(): - assert Sysctl(sysctl_opt[0]) == sysctl_opt[1] + with host.sudo(): + assert host.sysctl(sysctl_opt[0]) == sysctl_opt[1] @pytest.mark.parametrize('paxtest_check', [ @@ -108,48 +108,47 @@ def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt): "Return to function (memcpy)", "Return to function (memcpy, PIE)", ]) -def test_grsecurity_paxtest(Command, Sudo, paxtest_check): +def test_grsecurity_paxtest(host, paxtest_check): """ Check that paxtest does not report anything vulnerable Requires the package paxtest to be installed. The paxtest package is currently being installed in the app-test role. """ - if Command.exists("/usr/bin/paxtest"): - with Sudo(): - c = Command("paxtest blackhat") + if host.exists("/usr/bin/paxtest"): + with host.sudo(): + c = host.run("paxtest blackhat") assert c.rc == 0 assert "Vulnerable" not in c.stdout regex = "^{}\s*:\sKilled$".format(re.escape(paxtest_check)) assert re.search(regex, c.stdout) -def test_grub_pc_marked_manual(Command): +def test_grub_pc_marked_manual(host): """ Ensure the `grub-pc` packaged is marked as manually installed. This is necessary for VirtualBox with Vagrant. """ - c = Command('apt-mark showmanual grub-pc') + c = host.run('apt-mark showmanual grub-pc') assert c.rc == 0 assert c.stdout == "grub-pc" -def test_apt_autoremove(Command): +def test_apt_autoremove(host): """ Ensure old packages have been autoremoved. """ - c = Command('apt-get --dry-run autoremove') + c = host.run('apt-get --dry-run autoremove') assert c.rc == 0 assert "The following packages will be REMOVED" not in c.stdout [email protected](strict=True, - reason="PaX flags unset at install time, see issue #3916") [email protected](reason="PaX flags unset at install time, see issue #3916") @pytest.mark.parametrize("binary", [ "/usr/sbin/grub-probe", "/usr/sbin/grub-mkdevicemap", "/usr/bin/grub-script-check", ]) -def test_pax_flags(Command, File, binary): +def test_pax_flags(host, binary): """ Ensure PaX flags are set correctly on critical Grub binaries. These flags are maintained as part of a post-install kernel hook @@ -157,11 +156,11 @@ def test_pax_flags(Command, File, binary): the machine may fail to boot into a new kernel. """ - f = File("/etc/kernel/postinst.d/paxctl-grub") + f = host.file("/etc/kernel/postinst.d/paxctl-grub") assert f.is_file assert f.contains("^paxctl -zCE {}".format(binary)) - c = Command("paxctl -v {}".format(binary)) + c = host.run("paxctl -v {}".format(binary)) assert c.rc == 0 assert "- PaX flags: --------E--- [{}]".format(binary) in c.stdout diff --git a/molecule/testinfra/staging/common/test_ip6tables.py b/molecule/testinfra/staging/common/test_ip6tables.py --- a/molecule/testinfra/staging/common/test_ip6tables.py +++ b/molecule/testinfra/staging/common/test_ip6tables.py @@ -1,4 +1,4 @@ -def test_ip6tables_drop_everything(Command, Sudo): +def test_ip6tables_drop_everything(host): """ Ensure that all IPv6 packets are dropped by default. 
The IPv4 rules are more complicated, and tested separately. @@ -9,6 +9,6 @@ def test_ip6tables_drop_everything(Command, Sudo): -P OUTPUT DROP """.lstrip().rstrip() - with Sudo(): - c = Command.check_output("ip6tables -S") + with host.sudo(): + c = host.check_output("ip6tables -S") assert c == desired_ip6tables_output diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py --- a/molecule/testinfra/staging/common/test_system_hardening.py +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -21,21 +21,21 @@ ('net.ipv6.conf.default.disable_ipv6', 1), ('net.ipv6.conf.lo.disable_ipv6', 1), ]) -def test_sysctl_options(Sysctl, Sudo, sysctl_opt): +def test_sysctl_options(host, sysctl_opt): """ Ensure sysctl flags are set correctly. Most of these checks are disabling IPv6 and hardening IPv4, which is appropriate due to the heavy use of Tor. """ - with Sudo(): - assert Sysctl(sysctl_opt[0]) == sysctl_opt[1] + with host.sudo(): + assert host.sysctl(sysctl_opt[0]) == sysctl_opt[1] -def test_dns_setting(File): +def test_dns_setting(host): """ Ensure DNS service is hard-coded in resolv.conf config. """ - f = File('/etc/resolvconf/resolv.conf.d/base') + f = host.file('/etc/resolvconf/resolv.conf.d/base') assert f.is_file assert f.user == "root" assert f.group == "root" @@ -47,36 +47,44 @@ def test_dns_setting(File): 'bluetooth', 'iwlwifi', ]) -def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module): +def test_blacklisted_kernel_modules(host, kernel_module): """ Test that unwanted kernel modules are blacklisted on the system. Mostly these checks are defense-in-depth approaches to ensuring that wireless interfaces will not work. """ - with Sudo(): - assert kernel_module not in Command("lsmod").stdout + with host.sudo(): + c = host.run("lsmod") + assert kernel_module not in c.stdout - f = File("/etc/modprobe.d/blacklist.conf") + f = host.file("/etc/modprobe.d/blacklist.conf") assert f.contains("^blacklist {}$".format(kernel_module)) -def test_swap_disabled(Command): +def test_swap_disabled(host): """ Ensure swap space is disabled. Prohibit writing memory to swapfiles to reduce the threat of forensic analysis leaking any sensitive info. """ - hostname = Command.check_output('hostname') + hostname = host.check_output('hostname') # Mon doesn't have swap disabled yet - if not hostname.startswith('mon'): - c = Command.check_output('swapon --summary') - # A leading slash will indicate full path to a swapfile. - assert not re.search("^/", c, re.M) + if hostname.startswith('mon'): + return True + + c = host.check_output('swapon --summary') + # A leading slash will indicate full path to a swapfile. + assert not re.search("^/", c, re.M) + + if host.system_info.codename == "trusty": # Expect that ONLY the headers will be present in the output. rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority") + else: # On Xenial, swapon 2.27.1 shows blank output, with no headers, so # check for empty output as confirmation of no swap. 
- assert any((re.search(rgx, c), c == "")) + rgx = re.compile("^$") + + assert re.search(rgx, c) def test_twofactor_disabled_on_tty(host): @@ -109,16 +117,19 @@ def test_sshd_config(host, sshd_opts): assert line in sshd_config_file [email protected]('filenames', [ [email protected]('logfile', [ '/var/log/auth.log', '/var/log/syslog', ]) -def test_pam_(host, filenames, Command, Sudo): +def test_no_ecrypt_messages_in_logs(host, logfile): """ Ensure pam_ecryptfs is removed from /etc/pam.d/common-auth : not only is no longer needed, it causes error messages (see issue #3963) """ error_message = "pam_ecryptfs.so: cannot open shared object file" - with Sudo(): - log_file = host.file(filenames).content_string - assert error_message not in log_file + with host.sudo(): + f = host.file(logfile) + # Not using `f.contains(<pattern>)` because that'd cause the sought + # string to make it into syslog as a side-effect of the testinfra + # invocation, causing subsequent test runs to report failure. + assert error_message not in f.content_string diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -1,12 +1,12 @@ import re -def test_sudoers_config(File, Sudo): +def test_sudoers_config(host): """ Check sudoers config for passwordless sudo via group membership, as well as environment-related hardening. """ - f = File("/etc/sudoers") + f = host.file("/etc/sudoers") assert f.is_file assert f.user == "root" assert f.group == "root" @@ -14,7 +14,7 @@ def test_sudoers_config(File, Sudo): # Restrictive file mode requires sudo for reading, so let's # read once and store the content in a var. - with Sudo(): + with host.sudo(): sudoers_config = f.content # Using re.search rather than `f.contains` since the basic grep @@ -30,7 +30,7 @@ def test_sudoers_config(File, Sudo): assert re.search('Defaults:%sudo\s+!requiretty', sudoers_config, re.M) -def test_sudoers_tmux_env(File): +def test_sudoers_tmux_env(host): """ Ensure SecureDrop-specific bashrc additions are present. This checks for automatic tmux start on interactive shells. @@ -38,7 +38,7 @@ def test_sudoers_tmux_env(File): the corresponding settings there. """ - f = File('/etc/profile.d/securedrop_additions.sh') + f = host.file('/etc/profile.d/securedrop_additions.sh') non_interactive_str = re.escape('[[ $- != *i* ]] && return') tmux_check = re.escape('test -z "$TMUX" && (tmux attach ||' ' tmux new-session)') @@ -50,7 +50,7 @@ def test_sudoers_tmux_env(File): assert f.contains(tmux_check) -def test_tmux_installed(Package): +def test_tmux_installed(host): """ Ensure the `tmux` package is present, since it's required for the user env. When running an interactive SSH session over Tor, tmux should be started @@ -58,10 +58,10 @@ def test_tmux_installed(Package): unexpectedly, as sometimes happens over Tor. The Admin will be able to reconnect to the running tmux session and review command output. """ - assert Package("tmux").is_installed + assert host.package("tmux").is_installed -def test_sudoers_tmux_env_deprecated(File): +def test_sudoers_tmux_env_deprecated(host): """ Previous version of the Ansible config set the tmux config in per-user ~/.bashrc, which was redundant. 
The config has @@ -72,5 +72,5 @@ def test_sudoers_tmux_env_deprecated(File): admin_user = "vagrant" - f = File("/home/{}/.bashrc".format(admin_user)) + f = host.file("/home/{}/.bashrc".format(admin_user)) assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$") diff --git a/molecule/testinfra/staging/mon/test_mon_network.py b/molecule/testinfra/staging/mon/test_mon_network.py --- a/molecule/testinfra/staging/mon/test_mon_network.py +++ b/molecule/testinfra/staging/mon/test_mon_network.py @@ -9,16 +9,16 @@ securedrop_test_vars = pytest.securedrop_test_vars -def test_mon_iptables_rules(SystemInfo, Command, Sudo): +def test_mon_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison kwargs = dict( app_ip=os.environ.get('APP_IP', securedrop_test_vars.app_ip), - default_interface=Command.check_output( + default_interface=host.check_output( "ip r | head -n 1 | awk '{ print $5 }'"), - tor_user_id=Command.check_output("id -u debian-tor"), - ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"), - postfix_user_id=Command.check_output("id -u postfix"), + tor_user_id=host.check_output("id -u debian-tor"), + ssh_group_gid=host.check_output("getent group ssh | cut -d: -f3"), + postfix_user_id=host.check_output("id -u postfix"), dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters @@ -32,9 +32,9 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo): jinja_iptables = Template(io.open(iptables_file, 'r').read()) iptables_expected = jinja_iptables.render(**kwargs) - with Sudo(): + with host.sudo(): # Actually run the iptables scrape command - iptables = Command.check_output(iptables) + iptables = host.check_output(iptables) # print diff comparison (only shows up in pytests if test fails or # verbosity turned way up) for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'), diff --git a/molecule/testinfra/staging/mon/test_ossec_ruleset.py b/molecule/testinfra/staging/mon/test_ossec_ruleset.py --- a/molecule/testinfra/staging/mon/test_ossec_ruleset.py +++ b/molecule/testinfra/staging/mon/test_ossec_ruleset.py @@ -9,19 +9,19 @@ @pytest.mark.parametrize('log_event', sdvars.log_events_without_ossec_alerts) -def test_ossec_false_positives_suppressed(Command, Sudo, log_event): - with Sudo(): - c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format( - log_event["alert"])) +def test_ossec_false_positives_suppressed(host, log_event): + with host.sudo(): + c = host.run('echo "{}" | /var/ossec/bin/ossec-logtest'.format( + log_event["alert"])) assert "Alert to be generated" not in c.stderr @pytest.mark.parametrize('log_event', sdvars.log_events_with_ossec_alerts) -def test_ossec_expected_alerts_are_present(Command, Sudo, log_event): - with Sudo(): - c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format( - log_event["alert"])) +def test_ossec_expected_alerts_are_present(host, log_event): + with host.sudo(): + c = host.run('echo "{}" | /var/ossec/bin/ossec-logtest'.format( + log_event["alert"])) assert "Alert to be generated" in c.stderr alert_level = alert_level_regex.findall(c.stderr)[0] assert alert_level == log_event["level"] diff --git a/molecule/testinfra/staging/mon/test_ossec_server.py b/molecule/testinfra/staging/mon/test_ossec_server.py --- a/molecule/testinfra/staging/mon/test_ossec_server.py +++ b/molecule/testinfra/staging/mon/test_ossec_server.py @@ -6,7 +6,7 @@ securedrop_test_vars = pytest.securedrop_test_vars -def test_ossec_connectivity(Command, Sudo): +def 
test_ossec_connectivity(host): """ Ensure ossec-server machine has active connection to the ossec-agent. The ossec service will report all available agents, and we can inspect @@ -15,8 +15,8 @@ def test_ossec_connectivity(Command, Sudo): desired_output = "{}-{} is available.".format( securedrop_test_vars.app_hostname, os.environ.get('APP_IP', securedrop_test_vars.app_ip)) - with Sudo(): - c = Command.check_output("/var/ossec/bin/list_agents -a") + with host.sudo(): + c = host.check_output("/var/ossec/bin/list_agents -a") assert c == desired_output @@ -26,7 +26,7 @@ def test_ossec_connectivity(Command, Sudo): '/var/ossec/etc/sslmanager.key', '/var/ossec/etc/sslmanager.cert', ]) -def test_ossec_keyfiles(File, Sudo, keyfile): +def test_ossec_keyfiles(host, keyfile): """ Ensure that the OSSEC transport key pair exists. These keys are used to protect the connection between the ossec-server and ossec-agent. @@ -34,8 +34,8 @@ def test_ossec_keyfiles(File, Sudo, keyfile): All this check does in confirm they're present, it doesn't perform any matching checks to validate the configuration. """ - with Sudo(): - f = File(keyfile) + with host.sudo(): + f = host.file(keyfile) assert f.is_file # The postinst scripts in the OSSEC deb packages set 440 on the # keyfiles; the Ansible config should be updated to do the same. @@ -46,30 +46,30 @@ def test_ossec_keyfiles(File, Sudo, keyfile): # Permissions don't match between Ansible and OSSEC deb packages postinst. @pytest.mark.xfail -def test_procmail_log(File, Sudo): +def test_procmail_log(host): """ Ensure procmail log file exist with proper ownership. Only the ossec user should have read/write permissions. """ - with Sudo(): - f = File("/var/log/procmail.log") + with host.sudo(): + f = host.file("/var/log/procmail.log") assert f.is_file assert f.user == "ossec" assert f.group == "root" assert oct(f.mode) == "0660" -def test_ossec_authd(Command, Sudo): +def test_ossec_authd(host): """ Ensure that authd is not running """ - with Sudo(): - c = Command("pgrep ossec-authd") + with host.sudo(): + c = host.run("pgrep ossec-authd") assert c.stdout == "" assert c.rc != 0 -def test_hosts_files(File, SystemInfo): +def test_hosts_files(host): """ Ensure host files mapping are in place """ - f = File('/etc/hosts') + f = host.file('/etc/hosts') app_ip = os.environ.get('APP_IP', securedrop_test_vars.app_ip) app_host = securedrop_test_vars.app_hostname @@ -78,7 +78,7 @@ def test_hosts_files(File, SystemInfo): assert f.contains('^{}\s*{}$'.format(app_ip, app_host)) -def test_ossec_log_contains_no_malformed_events(File, Sudo): +def test_ossec_log_contains_no_malformed_events(host): """ Ensure the OSSEC log reports no errors for incorrectly formatted messages. These events indicate that the OSSEC server failed to decrypt @@ -88,11 +88,11 @@ def test_ossec_log_contains_no_malformed_events(File, Sudo): Documentation regarding this error message can be found at: http://ossec-docs.readthedocs.io/en/latest/faq/unexpected.html#id4 """ - with Sudo(): - f = File("/var/ossec/logs/ossec.log") + with host.sudo(): + f = host.file("/var/ossec/logs/ossec.log") assert not f.contains("ERROR: Incorrectly formated message from") -def test_regression_hosts(Command): +def test_regression_hosts(host): """ Regression test to check for duplicate entries. 
""" - assert Command.check_output("uniq --repeated /etc/hosts") == "" + assert host.check_output("uniq --repeated /etc/hosts") == "" diff --git a/molecule/testinfra/staging/mon/test_postfix.py b/molecule/testinfra/staging/mon/test_postfix.py --- a/molecule/testinfra/staging/mon/test_postfix.py +++ b/molecule/testinfra/staging/mon/test_postfix.py @@ -13,20 +13,20 @@ '/^User-Agent:/ IGNORE', '/^Received:/ IGNORE', ]) -def test_postfix_headers(File, header): +def test_postfix_headers(host, header): """ Ensure postfix header filters are set correctly. Common mail headers are stripped by default to avoid leaking metadata about the instance. Message body is always encrypted prior to sending. """ - f = File("/etc/postfix/header_checks") + f = host.file("/etc/postfix/header_checks") assert f.is_file assert oct(f.mode) == "0644" regex = '^{}$'.format(re.escape(header)) assert re.search(regex, f.content, re.M) -def test_postfix_generic_maps(File): +def test_postfix_generic_maps(host): """ Regression test to check that generic Postfix maps are not configured by default. As of #1565 Admins can opt-in to overriding the FROM address @@ -34,11 +34,11 @@ def test_postfix_generic_maps(File): `[email protected]` behavior, to avoid breaking email for previously existing instances. """ - assert not File("/etc/postfix/generic").exists - assert not File("/etc/postfix/main.cf").contains("^smtp_generic_maps") + assert not host.file("/etc/postfix/generic").exists + assert not host.file("/etc/postfix/main.cf").contains("^smtp_generic_maps") -def test_postfix_service(Service, Socket, Sudo): +def test_postfix_service(host): """ Check Postfix service. Postfix is used to deliver OSSEC alerts via encrypted email. On staging hosts, Postfix is disabled, due to lack @@ -46,10 +46,10 @@ def test_postfix_service(Service, Socket, Sudo): """ # Elevated privileges are required to read Postfix service info, # specifically `/var/spool/postfix/pid/master.pid`. - with Sudo(): - postfix = Service("postfix") + with host.sudo(): + postfix = host.service("postfix") assert postfix.is_running == securedrop_test_vars.postfix_enabled assert postfix.is_enabled == securedrop_test_vars.postfix_enabled - socket = Socket("tcp://127.0.0.1:25") + socket = host.socket("tcp://127.0.0.1:25") assert socket.is_listening == securedrop_test_vars.postfix_enabled
[xenial] Ensure TestInfra test completeness on Trusty, Xenial By design, the TestInfra config test suite runs slightly different checks for Trusty and Xenial. Care should be taken to preserve functionality of the config tests against both releases. Please note omissions or test failures in this ticket; we can then scope new issues as appropriate. Specifically, config tests should pass for the following scenarios: - Clean Trusty install (done) - Clean Xenial install (likely done) - Trusty -> Xenial upgrade (not done) Part of #3204.
2019-02-05T01:37:45Z
[]
[]
freedomofpress/securedrop
4,153
freedomofpress__securedrop-4153
[ "4078" ]
03b4a07e1e80596bef05d302130294c51a1bbd73
diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py --- a/securedrop/source_app/info.py +++ b/securedrop/source_app/info.py @@ -7,6 +7,10 @@ def make_blueprint(config): view = Blueprint('info', __name__) + @view.route('/disable-noscript-xss') + def disable_noscript_xss(): + return render_template("disable-noscript-xss.html") + @view.route('/tor2web-warning') def tor2web_warning(): return render_template("tor2web-warning.html")
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -223,6 +223,9 @@ def _source_tor2web_warning(self): def _source_why_journalist_key(self): self.driver.get(self.source_location + "/why-journalist-key") + def _source_disable_noscript_xss(self): + self.driver.get(self.source_location + "/disable-noscript-xss") + def _source_waits_for_session_to_timeout(self, session_length_minutes): time.sleep(session_length_minutes * 60 + 0.1) diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -17,3 +17,6 @@ def test_lookup_codename_hint(self): self._source_chooses_to_login() self._source_proceeds_to_login() self._source_sees_no_codename() + + def test_disable_noscript_xss(self): + self._source_disable_noscript_xss() diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -516,6 +516,14 @@ def test_why_journalist_key(source_app): assert "Why download the journalist's public key?" in text +def test_disable_noscript_xss(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('info.disable_noscript_xss')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "<h1>Turn off NoScript's cross-site request sanitization setting</h1>" in text + + def test_metadata_route(source_app): with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: mocked_platform.return_value = ("Ubuntu", "16.04", "xenial")
Some Source Interface uploads fail with Internal Server Error
## Description

The SD Source Interface should support file uploads of up to 500MB, with larger uploads failing immediately. Instead, large file uploads under 500MB are frequently failing after a long timeout. (This was initially reported as happening with files of 50MB or more, but it isn't consistently reproducible at that size.)

## Steps to Reproduce

Using VMs or a hardware instance:

1. Visit the Source Interface using the Tor Browser
2. Click through to the /lookup page
3. Upload a large file (50MB+)

## Expected Behavior

File upload completes with a success message on the page.

## Actual Behavior

Upload has a good chance of failing with the message below:

<img width="987" alt="screen-shot-2019-01-28-at-11 52 40-am" src="https://user-images.githubusercontent.com/2782952/51862492-9bee6500-22f3-11e9-978c-654fd159e7ef.png">
Corresponding log lines in source error apache logs:
```
[Mon Jan 28 23:16:55.007331 2019] [reqtimeout:info] [pid 5161:tid 3478096422656] [client 127.0.0.1:32854] AH01382: Request body read timeout
[Mon Jan 28 23:16:55.008916 2019] [:error] [pid 5161:tid 3478096422656] (70007)The timeout specified has expired: [client 127.0.0.1:32854] mod_wsgi (pid=5161): Unable to get bucket brigade for request., referer: http://<my staging server>.onion/lookup
[Mon Jan 28 23:16:55.048291 2019] [:error] [pid 5159:tid 3478277211904] [remote 127.0.0.1:36570] mod_wsgi (pid=5159): Exception occurred processing WSGI script '/var/www/source.wsgi'.
[Mon Jan 28 23:16:55.048872 2019] [:error] [pid 5159:tid 3478277211904] [remote 127.0.0.1:36570] IOError: failed to write data
```
The connection is dropping for some reason before the file upload finishes; this could be an Apache timeout or a Tor-level issue.

It might be worth experimenting with the `TimeOut` and `KeepAliveTimeout` directives - I've tried increasing them from the default, which doesn't resolve the situation entirely, but resolving this issue may be a matter of tuning these values. I currently have insufficient data to determine whether or not increasing these timeouts improves the situation.

One initial thought was that this was a regression introduced relatively recently. Testing against the 0.8.0 tag on staging VMs with a 100MB test file, I managed to reproduce the error, so it's been around for a while.

If we can tweak keepalives and such without increasing the risk of DoS attacks, that might be the simplest way to resolve this bug. Looking at:

https://metrics.torproject.org/torperf.html?start=2018-10-31&end=2019-01-29&server=onion&filesize=5mb

it seems like a 500MB file could take 25min+ on average (5s to set up, 3s/MB; roughly 5 + 1500 s, i.e. about 25 min for 500MB), if that informs timeout settings.

I am testing various numbers on those configuration files with a 125MB file.

```
Timeout 120
KeepAlive On
MaxKeepAliveRequests 400
KeepAliveTimeout 300
```

These settings allow me to submit a 125MB file nicely, but not 250MB files. I am still trying out other numbers. Even after doubling every number, a 250MB file still fails.

Someone else please pick it up from here as I will be logging out soon.

This morning I tried

```
KeepAlive On
KeepAliveTimeout 60
```

(which I see now was a bit more conservative than @kushaldas' settings) and it made no difference. 100MB+ still failing on staging VMs for me. After reading a bit more I don't see the KeepAlive timeout value as being relevant; it would affect subsequent connections but not single long-running ones AFAICT. The `Timeout` value is probably more important.

Looking into mod_reqtimeout in `/etc/apache2/mods-enabled/reqtimeout.conf` - settings are the default, which includes a minimum bitrate of 500 bytes/s. Setting that down to 100 bytes/s got me my first successful 100MB upload!

(Again, depending on how much protection we need against DoSes, that min bitrate setting could be turned off altogether. Then the Apache `Timeout` directive would be the relevant one to tweak.)

Tested 250MB upload with minrate disabled: connection never timed out, no network activity after 30mins or so, killed it after 1hr.

Tested 250MB upload with minrate set to 100: connection timed out after 30min with "Internal Server Error" message.

Tested 250MB upload with head and body timeouts set to 0 (effectively disabling mod_reqtimeout): connection stays open after download completed, never closes.
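For reference, here is a sketch of the `mod_reqtimeout` directive under discussion. The stock values are assumed to match the Debian/Ubuntu default shipped in `reqtimeout.conf`, and the relaxed variant reflects the 100 bytes/s experiment above; only one `RequestReadTimeout` line would be active at a time:

```apache
# Assumed stock configuration: abort the request if body data arrives
# slower than 500 bytes/s after a 20s grace period.
RequestReadTimeout header=20-40,MinRate=500 body=20,MinRate=500

# Relaxed variant from the experiment above: tolerate slow Tor uploads
# down to 100 bytes/s for the request body.
RequestReadTimeout header=20-40,MinRate=500 body=20,MinRate=100
```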
Tested 250MB upload with mod_reqtimeout disabled with `a2dismod reqtimeout && service apache2 restart`: connection stays open after download completed, never closes. I also tried to disable the whole `gpg` encryption just to test, the last file uploads were still getting connection reset. I modified the following in the `store.py`. ```Python import shutil shutil.copyfile(stf, encrypted_file_path) #current_app.crypto_util.encrypt( # stf, self.__gpg_key, encrypted_file_path) ``` I applied the diff below in a staging VM in order to determine if offloading work to an async worker would address this issue. The diff below disables both gpg encryption and moving files around on disk in case the length of time these tasks are taking is causing the timeouts. My expectation is that if the length of time these tasks is taking is the sole cause of the timeouts, then with the diff below no timeouts should occur. Unfortunately, for a 70MB file, I still saw the timeout 50% of the time (warning: low number statistics, I only did this 4 times). So we need to do a bit more investigation, as there is either another cause of the issue, instead of or - if there are multiple contributing factors - in addition to the hypothesized cause above. Here's the diff: ```diff diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py index e3dd66b4..649cff3e 100644 --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -157,6 +157,8 @@ def make_blueprint(config): fh.filename, fh.stream)) + current_app.logger.error('we got back into the view function!') + if first_submission: msg = render_template('first_submission_flashed_message.html') flash(Markup(msg), "success") diff --git a/securedrop/store.py b/securedrop/store.py index 924f8d5e..f0e909f2 100644 --- a/securedrop/store.py +++ b/securedrop/store.py @@ -71,8 +71,8 @@ class Storage: if ext != '.gpg': # if there's an extension, verify it's a GPG raise PathException("Invalid file extension %s" % (ext, )) - if not VALIDATE_FILENAME(filename): - raise PathException("Invalid filename %s" % (filename, )) + #if not VALIDATE_FILENAME(filename): + # raise PathException("Invalid filename %s" % (filename, )) def path(self, *s): """Get the normalized, absolute file path, within @@ -136,7 +136,8 @@ class Storage: count, journalist_filename) encrypted_file_path = self.path(filesystem_id, encrypted_file_name) - with SecureTemporaryFile("/tmp") as stf: # nosec + current_app.logger.error('file starting to stream to disk!') + with SecureTemporaryFile("/var/lib/securedrop/store") as stf: # nosec with gzip.GzipFile(filename=sanitized_filename, mode='wb', fileobj=stf, mtime=0) as gzf: # Buffer the stream into the gzip file to avoid excessive @@ -147,10 +148,20 @@ class Storage: break gzf.write(buf) - current_app.crypto_util.encrypt( - stf, self.__gpg_key, encrypted_file_path) + current_app.logger.error('file finished streaming to disk!') - return encrypted_file_name + # Disable gpg encryption as this might take some time and cause + # a timeout. + #current_app.crypto_util.encrypt( + # stf, self.__gpg_key, encrypted_file_path) + + # Touch a file in place instead of copying, as copying might take some + # time and cause a timeout. + with open(stf.name + '.gpg', 'a'): + os.utime(stf.name + '.gpg', None) + + # Return this dummy file so success is reported back to the user/tester. 
+ return stf.name + '.gpg' def save_pre_encrypted_reply(self, filesystem_id, count, journalist_filename, content): ``` Likely related: https://github.com/micahflee/onionshare/issues/899 (courtesy @micahflee) OK I've now tested two 300 MB file uploads to a staging instance after unchecking the `Sanitize cross-site suspicious requests` option in NoScript: <img width="509" alt="screen shot 2019-02-12 at 5 57 21 pm" src="https://user-images.githubusercontent.com/7832803/52681146-bc234400-2eef-11e9-8ecc-c1c951a6052e.png"> and they both worked, so we're indeed hitting the same problem as onionshare. Until upstream resolves this issue, choices are: 1. Drastically restrict file upload size. 2. Enable js and use an AJAX based approach similar to what onionshare is doing in https://github.com/micahflee/onionshare/pull/901/files as a workaround (this involves instructing sources to set the security slider to Safer instead of Safest) - obviously a big departure from SecureDrop's prior avoidance of JavaScript 3. Provide a page walking users through how to disable the sanitization option above, which involves going into Noscript's advanced settings, and seems somewhat scary and suspicious so the messaging needs to be carefully written (and decided on ASAP because the strings needs to get translated) 1 or 3 is something we could realistically do for 0.12.0, I prefer 3 since it preserves functionality. We should make sure tomorrow to evaluate the implications of disabling this protection in more detail, but from a first analysis it seems it is the lowest risk option that preserves SecureDrop's main functionality, which after all, is the uploading of files. Tested with NoScript "Sanitize cross-site suspicious requests" checkbox unchecked & 271MB file on 0.11.1 prod/hardware instance: success! Let's indeed discuss how to best engage with the associated upstream issues but I agree that, in the absence of a fix, option 3 makes the most sense. It looks like you're probably not going with option 2. But if you do, I just discovered that it will only work with the Tor Browser security slider set to Standard, not to Safer or Safest (except on SecureDrop servers that use HTTPS, then Safer will work). The slider set to Safer blocks javascript on non-HTTPS sites, including non-HTTPS onion sites. ^ Design ticket to work on messaging/walkthrough things for Option 3. FYI. Will post what the team decides they like, here, once the design task Issue is resolved. A few notes: - Given that the root cause is POST data sanitization by a browser extension, it's unlikely that this error will only be triggered above a certain size threshold. It is more likely that the error will be triggered at higher probability at larger sizes. I have changed the issue title accordingly. - Our plan of record is to add an explanation to the Source Interface. We've also engaged upstream NoScript, and may reach out to upstream Mozilla as well, but that's unlikely to help in the near term. @rmol is taking the lead on implementation of the explanatory page, starting with some preliminary investigations, and @ninavizz is taking a design stab at different integration options. - As a reminder, we need to get Source Interface text changes in before EOD 2/19 (string freeze), and 2/18 is a US holiday. So we're in a bit of a crunch on this, time-wise. This argues for the simplest possible implementation. I suggest we aim to have a first PR at least in WIP state (with text/design possibly in flux) by EOD tomorrow (Thursday). 
**Decided Upon Source Interface Messaging (ignore the pink prototype nav arrows in mox)** - All text in items 2 and 3, cut-and-pasted verbatim from existing experience to preserve translations - @eloquence and @redshiftzero on the fence about ordering of items 2 and 3; see discussion-points in "Feedback" in [ux Issue comment](https://github.com/freedomofpress/securedrop-ux/issues/44#issuecomment-463752613). **UX Action Items remaining** - Nina will post final text & PNG assets for dev(s) to build page(s) and UI edits in new comment, here, by Thurs Eve (PST) ![image](https://user-images.githubusercontent.com/8262612/52811127-30bdc600-3049-11e9-84ba-b66ac53c762c.png) ![image](https://user-images.githubusercontent.com/8262612/52811113-27345e00-3049-11e9-8fc9-e8364aa22791.png) We've agreed to keep the ordering as in the mock above. Per @emkll we'll also add a recommendation to re-enable the setting post-upload to the instructions, and of course we'll need to monitor the NoScript situation carefully, since asking users explicitly to turn off a security setting is far from ideal, even if it has no effect for users with JavaScript disabled. Chatted with @emkll a bit about this and he made two other points that I think we should include: 1. Can we indicate in the text that this disabling of the XSS filter is a temporary situation, and not the New Way that SecureDrop works? 2. Can we say: "(Required for submission of files larger than 50MB, or if you have trouble uploading)" instead of "(Required)"? If someone has a small file, chances are they will be fine, and we can have them just upload and then disable the XSS filter if there is an issue. Thoughts? > 1. Can we indicate in the text that this disabling of the XSS filter is a temporary situation, and not the New Way that SecureDrop works? That makes sense to me! > 2. Can we say: "(Required for submission of files larger than 50MB, or if you have trouble uploading)" instead of "(Required)"? If someone has a small file, chances are they will be fine, and we can have them just upload and then disable the XSS filter if there is an issue. How about "required for uploads to work reliably"? My understanding is that this is technically the most accurate description. The "50MB" really just traces back to our early investigation and we don't have sufficient empirical grounding for using it in messaging, IMO. Users will see an "Internal Server Error" if their uploads fail, without any obvious explanation of how to mitigate. Some uploads will work, some will fail. I think we should encourage all users to uncheck the box so they don't hit that error wall, and then re-check it when they're done. > How about "required for uploads to work reliably"? My understanding is that this is technically the most accurate description. The "50MB" really just traces back to our early investigation and we don't have sufficient empirical grounding for using it in messaging, IMO. I like that. Could we also reassure the source that this isn't going to endanger them, e.g. "This is required for uploads to work reliably, and is safe with our recommended Tor Browser settings." or is that too verbose here, and better left for the how-to page? I've taken the text from Nina's designs, edited it a bit, and put it in a GDoc for easier collaboration: https://docs.google.com/document/d/1HNrRaOrRJfUX61DDNRpQcc9UKs6y80Y9DWUf3PxypU4/edit# Please make edits in suggest mode. 
I've tried to keep the text as short as possible, to minimize the workload for translators, and to keep things as simple as possible from the user's point of view. @ninavizz made a good case for keeping the language in the 1,2,3 list as simple as possible to ensure users actually follow it (see comment history in GDoc). The current draft has a minimal "ask" on the upload screen. The "show me how" page offers an explanation why this is needed and what the security implications are, a link to the relevant issue for people who are interested, and clear instructions for what to do. (I recommend that we link to the NoScript issue since it provides the most relevant context.) Images for 411 page. Will get SVG dialog "!" image up (an optional thing for this PR, I know) for the confirmation page, once home from the vet in 2hrs: https://drive.google.com/open?id=16DUGdVwEZvQhOkPbIYFMJ8eWpcJHP6Q5 Just making a note here that we should test a bit more (or look at the NoScript code) whether: 1) it makes any difference whether or not the SecureDrop upload page is reloaded or not after the setting is unchecked. 2) it makes any difference whether or not the NoScript Settings browser tab is closed or not after the setting is unchecked. Preliminary testing indicates "no difference" for 1) (i.e. it is fine to just upload immediately after unchecking the setting). I've not tested 2) yet. I'm guessing the setting takes effect immediately, but we should verify that.
2019-02-18T23:19:26Z
[]
[]
freedomofpress/securedrop
4,193
freedomofpress__securedrop-4193
[ "4192" ]
3ad197c7cb51a3002b72659001ac7c835130ec58
diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py --- a/securedrop/i18n_tool.py +++ b/securedrop/i18n_tool.py @@ -40,10 +40,12 @@ class I18NTool(object): 'fr_FR': {'name': 'French', 'desktop': 'fr', }, 'el': {'name': 'Greek', 'desktop': 'el', }, 'hi': {'name': 'Hindi', 'desktop': 'hi', }, + 'is': {'name': 'Icelandic', 'desktop': 'is', }, 'it_IT': {'name': 'Italian', 'desktop': 'it', }, 'nb_NO': {'name': 'Norwegian', 'desktop': 'nb_NO', }, 'nl': {'name': 'Dutch', 'desktop': 'nl', }, 'pt_BR': {'name': 'Portuguese, Brasil', 'desktop': 'pt_BR', }, + 'ro': {'name': 'Romanian', 'desktop': 'ro', }, 'ru': {'name': 'Russian', 'desktop': 'ru', }, 'sv': {'name': 'Swedish', 'desktop': 'sv', }, 'tr': {'name': 'Turkish', 'desktop': 'tr', }, @@ -124,7 +126,7 @@ def translate_desktop(self, args): if args.compile: pos = filter(lambda f: f.endswith('.po'), os.listdir(args.translations_dir)) - linguas = map(lambda l: l.rstrip('.po'), pos) + linguas = map(lambda l: l[:-3], pos) content = "\n".join(linguas) + "\n" open(join(args.translations_dir, 'LINGUAS'), 'w').write(content)
diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py --- a/securedrop/tests/test_i18n_tool.py +++ b/securedrop/tests/test_i18n_tool.py @@ -85,6 +85,21 @@ def test_translate_desktop_l10n(self, tmpdir): po_file) assert exists(po_file) + # Regression test to trigger bug introduced when adding + # Romanian as an accepted language. + locale = 'ro' + po_file = join(str(tmpdir), locale + ".po") + msginit( + '--no-translator', + '--locale', locale, + '--output', po_file, + '--input', messages_file) + source = 'SecureDrop Source Interfaces' + sed('-i', '-e', + '/{}/,+1s/msgstr ""/msgstr "SOURCE RO"/'.format(source), + po_file) + assert exists(po_file) + # # Compile but do not extract+update #
error when compiling ro translations
## Description

Discovered and fixed by @kushaldas in d63f61458cfdf41e95df673e2fba92b1c704d159; just filing this as an issue so there is a clear change history.

## Steps to Reproduce

1. Add `ro` (Romanian) to SecureDrop's supported languages (ref: 862860f80b9435cf5df2549f97dabe6f41182386)
2. Compile translations:

```
$ securedrop/bin/dev-shell ./i18n_tool.py --verbose translate-desktop --compile
```

## Expected Behavior

`.mo` files are created

## Actual Behavior

```
$ securedrop/bin/dev-shell ./i18n_tool.py --verbose translate-desktop --compile
Run with DOCKER_BUILD_VERBOSE=true for more information
Docker image build in progress
done !
2019-02-25 17:28:43,373 INFO <Command u'/usr/bin/msgfmt --desktop --template desktop-journalist-icon.j2.in -o desktop-journalist-icon.j2 -d .'>: starting process
2019-02-25 17:28:43,380 INFO <Command u'/usr/bin/msgfmt --desktop --template desktop-journalist-icon.j2.in -o desktop-journalist-icon.j2 -d .', pid 9>: process started
Traceback (most recent call last):
  File "./i18n_tool.py", line 372, in <module>
    sys.exit(I18NTool().main(sys.argv[1:]))
  File "./i18n_tool.py", line 366, in main
    return args.func(args)
  File "./i18n_tool.py", line 139, in translate_desktop
    _cwd=args.translations_dir)
  File "/usr/local/lib/python2.7/dist-packages/sh.py", line 1427, in __call__
    return RunningCommand(cmd, call_args, stdin, stdout, stderr)
  File "/usr/local/lib/python2.7/dist-packages/sh.py", line 774, in __init__
    self.wait()
  File "/usr/local/lib/python2.7/dist-packages/sh.py", line 792, in wait
    self.handle_command_exit_code(exit_code)
  File "/usr/local/lib/python2.7/dist-packages/sh.py", line 815, in handle_command_exit_code
    raise exc
sh.ErrorReturnCode_1:

  RAN: /usr/bin/msgfmt --desktop --template desktop-journalist-icon.j2.in -o desktop-journalist-icon.j2 -d .

  STDOUT:

  STDERR:
/usr/bin/msgfmt: error while opening "r.po" for reading: No such file or directory
```

## Comments

This is due to `rstrip` stripping an additional character from the locale name, so the generated LINGUAS file lists `r` instead of `ro`, and `msgfmt` then looks for `r.po`.
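The one-character loss is easy to reproduce in isolation: `str.rstrip()` treats its argument as a *set* of characters to strip, not as a suffix, so any trailing characters drawn from `{'.', 'p', 'o'}` are removed. A quick demonstration of the bug and of the slicing fix from the patch:

```python
>>> 'fr.po'.rstrip('.po')  # stripping stops at 'r', so this locale survives
'fr'
>>> 'ro.po'.rstrip('.po')  # the trailing 'o' of the locale is also stripped
'r'
>>> 'ro.po'[:-3]           # the fix: slice off the fixed three-char suffix
'ro'
```

This is why only locale codes ending in `o`, `p`, or `.` were affected; every previously supported language happened to avoid the trap.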
2019-02-25T23:35:45Z
[]
[]
freedomofpress/securedrop
4,253
freedomofpress__securedrop-4253
[ "4237" ]
afcf563f2f5c072253c0f25b8b9944daa3fbbc1c
diff --git a/molecule/vagrant-packager-trusty/package.py b/molecule/vagrant-packager-trusty/package.py new file mode 120000 --- /dev/null +++ b/molecule/vagrant-packager-trusty/package.py @@ -0,0 +1 @@ +../vagrant-packager/package.py \ No newline at end of file diff --git a/molecule/vagrant_packager/package.py b/molecule/vagrant-packager/package.py similarity index 65% rename from molecule/vagrant_packager/package.py rename to molecule/vagrant-packager/package.py --- a/molecule/vagrant_packager/package.py +++ b/molecule/vagrant-packager/package.py @@ -3,6 +3,7 @@ # # # +import hashlib import json import os from os.path import join @@ -13,6 +14,17 @@ import xml.etree.ElementTree as ET +# Current script is symlinked into adjacent scenario, for Trusty compatibility. +# Look up "name" for scenario from real path (relative to symlink), but store +# all artifacts in primary scenario (via realpath). +SCENARIO_NAME = os.path.basename(os.path.dirname(os.path.abspath(__file__))) +SCENARIO_PATH = os.path.dirname(os.path.realpath(__file__)) +BOX_BUILD_DIR = join(SCENARIO_PATH, "build") +BOX_METADATA_DIR = join(SCENARIO_PATH, "box_files") +EPHEMERAL_DIRS = {} +TARGET_VERSION_FILE = os.path.join(SCENARIO_PATH, os.path.pardir, "shared", "stable.ver") + + class LibVirtPackager(object): def __init__(self, vm): @@ -117,17 +129,14 @@ def vagrant_metadata(self, img_location): def main(): - SCENARIO_PATH = os.path.dirname(os.path.realpath(__file__)) - BOX_PATH = join(SCENARIO_PATH, "build") - EPHEMERAL_DIRS = {} - TARGET_VERSION_FILE = os.path.join(SCENARIO_PATH, os.path.pardir, "shared", "stable.ver") with open(TARGET_VERSION_FILE, 'r') as f: TARGET_VERSION = f.read().strip() - try: - TARGET_PLATFORM = os.environ['SECUREDROP_TARGET_PLATFORM'] - except KeyError: - msg = "Set SECUREDROP_TARGET_PLATFORM env var to 'trusty' or 'xenial'" - raise Exception(msg) + + # Default to Xenial as base OS, but detect if script was invoked from the + # Trusty-specific scenario, and use Trusty if so. + TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "xenial") + if SCENARIO_NAME.endswith("-trusty"): + TARGET_PLATFORM = "trusty" for srv in ["app-staging", "mon-staging"]: @@ -141,7 +150,7 @@ def main(): except OSError: pass - vm = LibVirtPackager("vagrant_packager_"+srv) + vm = LibVirtPackager("{}_{}".format(SCENARIO_NAME, srv)) tmp_img_file = join(EPHEMERAL_DIRS["tmp"], "wip.img") packaged_img_file = join(EPHEMERAL_DIRS["build"], "box.img") @@ -178,11 +187,11 @@ def main(): mdata) # Copy in appropriate vagrant file to build dir - shutil.copyfile(join(SCENARIO_PATH, "box_files", "Vagrantfile."+srv), + shutil.copyfile(join(BOX_METADATA_DIR, "Vagrantfile."+srv), join(EPHEMERAL_DIRS['build'], 'Vagrantfile')) print("Creating tar file") - box_file = join(BOX_PATH, "{}-{}_{}.box".format(srv, TARGET_PLATFORM, TARGET_VERSION)) + box_file = join(BOX_BUILD_DIR, "{}-{}_{}.box".format(srv, TARGET_PLATFORM, TARGET_VERSION)) with tarfile.open(box_file, "w|gz") as tar: for boxfile in ["box.img", "Vagrantfile", "metadata.json"]: tar.add(join(EPHEMERAL_DIRS["build"], boxfile), @@ -190,9 +199,68 @@ def main(): print("Box created at {}".format(box_file)) + print("Updating box metadata") + update_box_metadata(srv, box_file, TARGET_PLATFORM, TARGET_VERSION) + print("Clean-up tmp space") shutil.rmtree(EPHEMERAL_DIRS['tmp']) +def sha256_checksum(filepath): + """ + Returns a SHA256 checksum for a given filepath. 
+ """ + checksum = hashlib.sha256() + with open(filepath, 'rb') as f: + # Read by chunks, to avoid slurping the entire file into memory. + # Box files range from 500MB to 1.5GB. + for block in iter(lambda: f.read(checksum.block_size), b''): + checksum.update(block) + return checksum.hexdigest() + + +def update_box_metadata(server_name, box_file, platform, version): + """ + Updates the JSON file of Vagrant box metadata, including remote URL, + version number, and SHA256 checksum. + """ + # Strip off "staging" suffix from box names + server_name_short = re.sub('\-staging$', '', server_name) + json_file_basename = "{}_{}_metadata.json".format(server_name_short, platform) + json_file = os.path.join(BOX_METADATA_DIR, json_file_basename) + + # Read in current JSON metadata, so we can append the new info to it. + with open(json_file, "r") as f: + metadata_config = json.loads(f.read()) + + base_url = "https://s3.amazonaws.com/securedrop-vagrant" + box_name = os.path.basename(box_file) + box_url = "{}/{}".format(base_url, box_name) + box_checksum = sha256_checksum(box_file) + box_config = dict( + name="libvirt", + url=box_url, + checksum_type="sha256", + checksum=box_checksum, + ) + # Creating list of dicts to adhere to JSON format of Vagrant box metadata + providers_list = [] + providers_list.append(box_config) + version_config = dict( + version=version, + providers=providers_list, + ) + box_versions = metadata_config['versions'] + box_versions.append(version_config) + metadata_config['versions'] = box_versions + + # Write out final, modified data. Does not validate for uniqueness, + # so repeated runs on the same version will duplicate version info, + # which'll likely break the box fetching. Target file is version-controlled, + # though, so easy enough to correct in the event of a mistake. + with open(json_file, "w") as f: + f.write(json.dumps(metadata_config, indent=2, sort_keys=True)) + + if __name__ == "__main__": main()
diff --git a/docs/development/upgrade_testing.rst b/docs/development/upgrade_testing.rst --- a/docs/development/upgrade_testing.rst +++ b/docs/development/upgrade_testing.rst @@ -29,6 +29,12 @@ https://apt-test.freedom.press/. Both options are described below. Upgrade testing using locally-built packages -------------------------------------------- +.. note:: + As of ``0.12.1``, the default platform for upgrade testing + boxes is Ubuntu Xenial 16.04. For a limited time, we will also support + upgrade boxes based on Ubuntu Trusty 14.04. Substitute ``upgrade-trusty`` + for ``upgrade`` in the actions documented below to force use of Trusty. + First, build the app code packages and create the environment: .. code:: sh @@ -120,13 +126,8 @@ testing. The procedure is as follows: 1. ``git checkout <version>`` 2. ``make vagrant-package`` -3. ``mv molecule/vagrant_packager/build/app-staging{,_<version>}.box`` -4. ``mv molecule/vagrant_packager/build/mon-staging{,_<version>}.box`` -5. ``sha256sum molecule/vagrant_packager/build/*.box`` -6. Manually update ``molecule/vagrant_packager/box_files/*.json`` with new - version information, including URL and checksum. -7. ``cd molecule/vagrant_packager && ./push.yml`` to upload to S3 -8. Commit the local changes to JSON files and open a PR. +3. ``cd molecule/vagrant-packager && ./push.yml`` to upload to S3 +4. Commit the local changes to JSON files and open a PR. Subsequent invocations of ``make upgrade-start`` will pull the latest version of the box.
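For orientation, the entry that `update_box_metadata()` (added in the `package.py` patch above) appends to the per-server JSON metadata file has roughly the following shape; the version, URL, and digest shown here are illustrative placeholders derived from the code, not real release values:

```python
# One element of the "versions" list in <server>_<platform>_metadata.json.
version_entry = {
    "version": "0.12.1",
    "providers": [
        {
            "name": "libvirt",
            "url": ("https://s3.amazonaws.com/securedrop-vagrant/"
                    "app-staging-xenial_0.12.1.box"),
            "checksum_type": "sha256",
            "checksum": "<sha256 hex digest of the .box file>",
        }
    ],
}
```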
Port upgrade scenario to Xenial ## Description The "upgrade" scenario in Molecule is Trusty-only, and should be ported to Xenial (as default), so that upgrade testing can be performed on Xenial hosts. Related to #4155. ## User Research Evidence None, dev-env only. ## User Stories As a developer, I want to test upgrades on the primary supported platform, i.e. Xenial.
2019-03-09T01:31:31Z
[]
[]
freedomofpress/securedrop
4,262
freedomofpress__securedrop-4262
[ "4208", "4100" ]
a8e21f340888060a5218104157d5b0ed723a7dcc
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -642,7 +642,8 @@ def check_for_updates(args): def get_release_key_from_keyserver(args, keyserver=None, timeout=45): - gpg_recv = ['timeout', str(timeout), 'gpg', '--recv-key'] + gpg_recv = ['timeout', str(timeout), 'gpg', '--batch', '--no-tty', + '--recv-key'] release_key = [RELEASE_KEY] # We construct the gpg --recv-key command based on optional keyserver arg. diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.12.0' +version = '0.12.1~rc1' # The full version, including alpha/beta/rc tags. -release = '0.12.0' +release = '0.12.1~rc1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.12.0' +__version__ = '0.12.1~rc1'
diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_play_configuration.py similarity index 87% rename from molecule/ansible-config/tests/test_max_fail_percentage.py rename to molecule/ansible-config/tests/test_play_configuration.py --- a/molecule/ansible-config/tests/test_max_fail_percentage.py +++ b/molecule/ansible-config/tests/test_play_configuration.py @@ -78,3 +78,16 @@ def test_any_errors_fatal(host, playbook): assert 'any_errors_fatal' in play # Ansible coerces booleans, so bare assert is sufficient assert play['any_errors_fatal'] + + [email protected]('playbook', find_ansible_playbooks()) +def test_locale(host, playbook): + """ + The securedrop-prod and securedrop-staging playbooks should + control the locale in the host environment by setting LC_ALL=C. + """ + with io.open(os.path.join(ANSIBLE_BASE, playbook), 'r') as f: + playbook_yaml = yaml.safe_load(f) + for play in playbook_yaml: + assert 'environment' in play + assert play['environment']['LC_ALL'] == 'C' diff --git a/molecule/builder-trusty/tests/test_securedrop_deb_package.py b/molecule/builder-trusty/tests/test_securedrop_deb_package.py --- a/molecule/builder-trusty/tests/test_securedrop_deb_package.py +++ b/molecule/builder-trusty/tests/test_securedrop_deb_package.py @@ -241,7 +241,7 @@ def test_deb_package_contains_expected_conffiles(host, deb): deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) - # Only relevant for the securedrop-app-code package: + # For the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: tmpdir = tempfile.mkdtemp() # The `--raw-extract` flag includes `DEBIAN/` dir with control files @@ -255,6 +255,12 @@ def test_deb_package_contains_expected_conffiles(host, deb): conffiles = f.content_string.rstrip() assert conffiles == "/var/www/securedrop/static/i/logo.png" + # For the securedrop-config package, we want to ensure there are no + # conffiles so securedrop_additions.sh is squashed every time + if "securedrop-config" in deb_package.path: + c = host.run("dpkg-deb -I {}".format(deb)) + assert "conffiles" not in c.stdout + @pytest.mark.parametrize("deb", deb_packages) def test_deb_package_contains_css(host, deb): @@ -449,3 +455,21 @@ def test_ossec_binaries_are_present_server(host, deb): c = host.run("dpkg-deb --contents {}".format(deb_package.path)) for wanted_file in wanted_files: assert wanted_file in c.stdout + + [email protected]("deb", deb_packages) +def test_config_package_contains_expected_files(host, deb): + """ + Inspect the package contents to ensure all config files are included in + the package. 
+ """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + if "securedrop-config" in deb_package.path: + wanted_files = [ + "/etc/cron-apt/action.d/9-remove", + "/etc/profile.d/securedrop_additions.sh", + ] + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + for wanted_file in wanted_files: + assert wanted_file in c.stdout diff --git a/molecule/builder-trusty/tests/vars.yml b/molecule/builder-trusty/tests/vars.yml --- a/molecule/builder-trusty/tests/vars.yml +++ b/molecule/builder-trusty/tests/vars.yml @@ -1,8 +1,8 @@ --- -securedrop_version: "0.12.0" +securedrop_version: "0.12.1~rc1" ossec_version: "3.0.0" keyring_version: "0.1.2" -config_version: "0.1.2" +config_version: "0.1.3" grsec_version: "4.4.167" # These values will be interpolated with values populated above diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py --- a/molecule/testinfra/staging/common/test_system_hardening.py +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -1,6 +1,8 @@ import pytest import re +testinfra_hosts = ["app", "app-staging", "mon", "mon-staging"] + @pytest.mark.parametrize('sysctl_opt', [ ('net.ipv4.conf.all.accept_redirects', 0), @@ -133,3 +135,13 @@ def test_no_ecrypt_messages_in_logs(host, logfile): # string to make it into syslog as a side-effect of the testinfra # invocation, causing subsequent test runs to report failure. assert error_message not in f.content_string + + [email protected]('package', [ + 'libiw30', + 'wpasupplicant', + 'wireless-tools', +]) +def test_unused_packages_are_removed(host, package): + """ Check if unused package is present """ + assert host.package(package).is_installed is False diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -1,4 +1,5 @@ import re +import textwrap def test_sudoers_config(host): @@ -38,16 +39,33 @@ def test_sudoers_tmux_env(host): the corresponding settings there. """ - f = host.file('/etc/profile.d/securedrop_additions.sh') - non_interactive_str = re.escape('[[ $- != *i* ]] && return') - tmux_check = re.escape('test -z "$TMUX" && (tmux attach ||' - ' tmux new-session)') + host_file = host.file('/etc/profile.d/securedrop_additions.sh') + expected_content = textwrap.dedent( + """\ + [[ $- != *i* ]] && return - assert f.contains("^{}$".format(non_interactive_str)) - assert f.contains("^if which tmux >\/dev\/null 2>&1; then$") + which tmux >/dev/null 2>&1 || return - assert 'test -z "$TMUX" && (tmux attach || tmux new-session)' in f.content - assert f.contains(tmux_check) + tmux_attach_via_proc() { + # If the tmux package is upgraded during the lifetime of a + # session, attaching with the new binary can fail due to different + # protocol versions. This function attaches using the reference to + # the old executable found in the /proc tree of an existing + # session. + pid=$(pgrep --newest tmux) + if test -n "$pid" + then + /proc/$pid/exe attach + fi + return 1 + } + + if test -z "$TMUX" + then + (tmux attach || tmux_attach_via_proc || tmux new-session) + fi""" + ) + assert host_file.content_string == expected_content def test_tmux_installed(host):
Add "« Back to submission page" link to NoScript docs
The `why-journalist-key` page in the Source Interface has a helpful "« Back to submission page" link at the bottom; the `disable-noscript-xss` page does not. We should have the link on both pages (or make other consistent UX changes to both pages), especially given that otherwise there's only a prominent "LOG OUT" link, and users may be disinclined to click the "Back" button in connection with forms.

# User Story

As a source, I want to know how I get back to the submission screen after learning about fun technical workarounds, so that I can successfully continue my submission.

SecureDrop GUI updater fails due to keyserver timeout
## Description

The SecureDrop GUI updater is failing with the "Update failed. Please contact your SecureDrop administrator" message. Checking the detailed log, the failure is due to the call to retrieve the release signing key from the keyserver at hkps://hkps.pool.sks-keyservers.net timing out.

## Steps to Reproduce

On a Tails 3.12 Admin or Journalist stick:

- set the code version to an earlier tag than the current release:
```
cd ~/Persistent/securedrop
git checkout 0.11.0
```
- Restart the Tor network connection (e.g. by disconnecting and reconnecting the network connection)
- Wait for the GUI Updater to appear and click *Update Now*

## Expected Behavior

Update completes without error, the user is prompted for the local admin password, and the tailsconfig phase completes without error.

## Actual Behavior

Update fails as described above.

## Comments

The `./securedrop-admin update` command works fine. Maybe pull the key in the same way in the GUI updater?
Rats, good catch! For 0.12.0, we will need to
- verify whether the graphical updater works _at all_ (i.e. intermittently or after repeated retries). If the graphical updater doesn't work at all but the CLI updater does, then that's unexpected and requires further investigation.
- at minimum, document the behavior in the release notes.

Adding rotation of keyservers or other mitigation is a stretch goal for the release.
Too late to make changes like this for 0.12.0; moving into the 0.12.1 milestone.
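A hypothetical sketch of the keyserver-rotation mitigation floated above; the server list, retry policy, and fingerprint placeholder are assumptions rather than shipped behavior, though `--keyserver` and `--recv-key` are real gpg options and the `--batch --no-tty` flags mirror the patch:

```python
import subprocess

RELEASE_KEY = "<release key fingerprint>"  # placeholder
KEYSERVERS = [
    "hkps://keys.openpgp.org",
    "hkps://hkps.pool.sks-keyservers.net",
]


def fetch_release_key(timeout=45):
    """Try each keyserver in turn, bounding each attempt with timeout(1)."""
    for keyserver in KEYSERVERS:
        cmd = ["timeout", str(timeout), "gpg", "--batch", "--no-tty",
               "--keyserver", keyserver, "--recv-key", RELEASE_KEY]
        if subprocess.call(cmd) == 0:
            return True
    return False
```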
2019-03-14T13:48:40Z
[]
[]
freedomofpress/securedrop
4,298
freedomofpress__securedrop-4298
[ "4286" ]
7334eba31021c97d935a29bfbe38f50b9b55becd
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -3,6 +3,7 @@ import datetime import os +import argparse from flask import current_app from sqlalchemy.exc import IntegrityError @@ -15,7 +16,7 @@ from models import Journalist, Reply, Source, Submission -def main(): +def main(staging=False): app = journalist_app.create_app(config) with app.app_context(): # Add two test users @@ -26,6 +27,10 @@ def main(): test_password, test_otp_secret, is_admin=True) + + if staging: + return + add_test_user("dellsberg", test_password, test_otp_secret, @@ -104,4 +109,9 @@ def create_source_and_submissions(num_submissions=2, num_replies=2): if __name__ == "__main__": # pragma: no cover - main() + parser = argparse.ArgumentParser() + parser.add_argument("--staging", help="Adding user for staging tests.", + action="store_true") + args = parser.parse_args() + + main(args.staging)
diff --git a/securedrop/tests/functional/README.md b/securedrop/tests/functional/README.md new file mode 100644 --- /dev/null +++ b/securedrop/tests/functional/README.md @@ -0,0 +1,33 @@ +### To test in prod vms + +- `sudo -u www-data bash` +- `cd /var/www/securedrop/` +- `./manage.py reset` # This will clean the DB for testing +- `./create-dev-data.py --staging` + +Update this information to the `tests/functional/instance_information.json` file. + +The content of the file looks like below. + +``` +{ + "hidserv_token": "asfjsdfag", + "journalist_location": "http://thejournalistfqb.onion", + "source_location": "http://thesourceadsfa.onion", + "sleep_time": 10, + "user": { + "name": "journalist", + "password": "WEjwn8ZyczDhQSK24YKM8C9a", + "secret": "JHCOGO7VCER3EJ4L" + } +} +``` + +### Run the tests + +``` +cd securedrop +./bin/dev-shell ./bin/run-test -v tests/functional/ +``` +You may wish to append a pipe to less (i.e. `| less`), as a failure may generate +many pages of output, making it difficult to scroll back.
Port support for creating dev data in staging VMs from `tbb-0.9.0` to `develop` In the `tbb-0.9.0` branch for functional testing via the Tor browser we added support for user creation in staging VMs, using the `create-dev-data.py` script, see #3672 and https://github.com/freedomofpress/securedrop/commit/249dd16ae9ae861c801ce282302ef07b6d38a16b. As part of incrementally merging the key changes in this branch, let's port this specific functionality to `develop`.
2019-03-25T16:11:06Z
[]
[]
freedomofpress/securedrop
4,309
freedomofpress__securedrop-4309
[ "3999" ]
2068b841d9b8554e4fd406e81384503a0bc01c16
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py --- a/journalist_gui/journalist_gui/SecureDropUpdater.py +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -5,6 +5,8 @@ import os import re import pexpect +import socket +import sys from journalist_gui import updaterUI, strings, resources_rc # noqa @@ -13,6 +15,25 @@ ESCAPE_POD = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') +def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # noqa + + # Null byte triggers abstract namespace + IDENTIFIER = '\0' + name + ALREADY_BOUND_ERRNO = 98 + + app.instance_binding = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + try: + app.instance_binding.bind(IDENTIFIER) + except OSError as e: + if e.errno == ALREADY_BOUND_ERRNO: + err_dialog = QtWidgets.QMessageBox() + err_dialog.setText(name + ' is already running.') + err_dialog.exec() + sys.exit() + else: + raise + + class SetupThread(QThread): signal = pyqtSignal('PyQt_PyObject')
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py --- a/journalist_gui/test_gui.py +++ b/journalist_gui/test_gui.py @@ -1,6 +1,7 @@ import unittest import subprocess import pexpect +import pytest from unittest import mock from unittest.mock import MagicMock from PyQt5.QtCore import Qt @@ -8,6 +9,54 @@ from PyQt5.QtTest import QTest from journalist_gui.SecureDropUpdater import UpdaterApp, strings, FLAG_LOCATION +from journalist_gui.SecureDropUpdater import prevent_second_instance + + [email protected]('journalist_gui.SecureDropUpdater.sys.exit') [email protected]('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') +class TestSecondInstancePrevention(unittest.TestCase): + def setUp(self): + self.mock_app = mock.MagicMock() + self.mock_app.applicationName = mock.MagicMock(return_value='sd') + + @staticmethod + def socket_mock_generator(already_bound_errno=98): + namespace = set() + + def kernel_bind(addr): + if addr in namespace: + error = OSError() + error.errno = already_bound_errno + raise error + else: + namespace.add(addr) + + socket_mock = mock.MagicMock() + socket_mock.socket().bind = mock.MagicMock(side_effect=kernel_bind) + return socket_mock + + def test_diff_name(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator() + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name2') + + mock_exit.assert_not_called() + + def test_same_name(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator() + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name1') + + mock_exit.assert_any_call() + + def test_unknown_kernel_error(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator(131) # crazy unexpected error + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + with pytest.raises(OSError): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name1') class AppTestCase(unittest.TestCase): @@ -184,7 +233,7 @@ def test_tails_status_success(self): 'failure_reason': ''} with mock.patch('os.remove') as mock_remove: - self.window.tails_status(result) + self.window.tails_status(result) # We do remove the flag file if the update does finish mock_remove.assert_called_once_with(FLAG_LOCATION) @@ -195,7 +244,7 @@ def test_tails_status_failure(self): 'failure_reason': '42'} with mock.patch('os.remove') as mock_remove: - self.window.tails_status(result) + self.window.tails_status(result) # We do not remove the flag file if the update does not finish mock_remove.assert_not_called()
On the Admin Workstation, N SecureDrop updater processes are started if N network interfaces are enabled in Tails

## Description

If a Tails Admin Workstation has more than one network interface, the SecureDrop network manager hook that checks for updates will run once for each active NIC (for example, if a workstation has both Ethernet and Wi-Fi enabled). This is confusing to end users and may result in multiple update processes clobbering each other's changes.

## Steps to Reproduce

On an Admin Workstation:

- revert to an earlier SecureDrop version:

  ```
  cd ~/Persistent/securedrop
  git checkout 0.10.0
  ```

- enable multiple network connections (e.g. Ethernet and Wi-Fi) and wait for their Tor connections to come up

## Expected Behavior

A single instance of the SecureDrop Updater is started.

## Actual Behavior

Multiple instances of the SecureDrop Updater are started.
https://tendo.readthedocs.io/en/latest/ can help us to solve this one with minimal code change. @redshiftzero @emkll any comments?

Last commit is 6 months ago, only 79 stars on GitHub: how commonly used is this dependency? Note that we use [this method](https://github.com/freedomofpress/securedrop-client/blob/master/securedrop_client/app.py#L113) to prevent duplicate instances of the securedrop-client; what do you think about using a similar approach here?

> Last commit is 6 months ago, only 79 stars on GitHub - how commonly used is this dependency? Note that we use this method to prevent duplicate instances of the securedrop-client, what do you think about using a similar approach here?

I was going to comment about that too; we can simply use the same function here.
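For reference, the securedrop-client method linked above amounts to binding a Unix domain socket in the Linux abstract namespace, which the kernel releases automatically when the owning process exits. A minimal sketch of the technique, simplified from the patch in this PR (a plain `sys.exit` stands in for the patch's Qt dialog):

```python
import socket
import sys

EADDRINUSE = 98  # "Address already in use" on Linux


def prevent_second_instance(app, name):
    # A leading null byte puts the socket in the Linux abstract namespace:
    # no filesystem entry is created, and the name is released automatically
    # when the process exits, so no stale lock files are left behind.
    identifier = '\0' + name
    app.instance_binding = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    try:
        app.instance_binding.bind(identifier)
    except OSError as e:
        if e.errno == EADDRINUSE:
            sys.exit('{} is already running.'.format(name))
        raise
```

Keeping the socket as an attribute on the application object prevents it from being garbage-collected, which would otherwise release the name and defeat the check.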
2019-04-01T14:57:53Z
[]
[]
freedomofpress/securedrop
4,314
freedomofpress__securedrop-4314
[ "4032" ]
4410a6cc8804a5c0a91a6abbc9640ba525196a12
diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py @@ -0,0 +1,84 @@ +"""add checksum columns + +Revision ID: b58139cfdc8c +Revises: f2833ac34bb6 +Create Date: 2019-04-02 10:45:05.178481 + +""" +import os +from alembic import op +import sqlalchemy as sa + +# raise the errors if we're not in production +raise_errors = os.environ.get('SECUREDROP_ENV', 'prod') != 'prod' + +try: + from journalist_app import create_app + from models import Submission, Reply + from sdconfig import config + from store import queued_add_checksum_for_file + from worker import rq_worker_queue +except: + if raise_errors: + raise + +# revision identifiers, used by Alembic. +revision = 'b58139cfdc8c' +down_revision = 'f2833ac34bb6' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('replies', schema=None) as batch_op: + batch_op.add_column(sa.Column('checksum', sa.String(length=255), nullable=True)) + + with op.batch_alter_table('submissions', schema=None) as batch_op: + batch_op.add_column(sa.Column('checksum', sa.String(length=255), nullable=True)) + + try: + app = create_app(config) + + # we need an app context for the rq worker extension to work properly + with app.app_context(): + conn = op.get_bind() + query = sa.text('''SELECT submissions.id, sources.filesystem_id, submissions.filename + FROM submissions + INNER JOIN sources + ON submissions.source_id = sources.id + ''') + for (sub_id, filesystem_id, filename) in conn.execute(query): + full_path = app.storage.path(filesystem_id, filename) + rq_worker_queue.enqueue( + queued_add_checksum_for_file, + Submission, + int(sub_id), + full_path, + app.config['SQLALCHEMY_DATABASE_URI'], + ) + + query = sa.text('''SELECT replies.id, sources.filesystem_id, replies.filename + FROM replies + INNER JOIN sources + ON replies.source_id = sources.id + ''') + for (rep_id, filesystem_id, filename) in conn.execute(query): + full_path = app.storage.path(filesystem_id, filename) + rq_worker_queue.enqueue( + queued_add_checksum_for_file, + Reply, + int(rep_id), + full_path, + app.config['SQLALCHEMY_DATABASE_URI'], + ) + except: + if raise_errors: + raise + + +def downgrade(): + with op.batch_alter_table('submissions', schema=None) as batch_op: + batch_op.drop_column('checksum') + + with op.batch_alter_table('replies', schema=None) as batch_op: + batch_op.drop_column('checksum') diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -21,6 +21,7 @@ JournalistInterfaceSessionInterface) from models import Journalist from store import Storage +from worker import rq_worker_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -83,6 +84,9 @@ def create_app(config): gpg_key_dir=config.GPG_KEY_DIR, ) + app.config['RQ_WORKER_NAME'] = config.RQ_WORKER_NAME + rq_worker_queue.init_app(app) + @app.errorhandler(CSRFError) def handle_csrf_error(e): # render the message first to ensure it's localized. 
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -177,7 +177,7 @@ def all_source_submissions(source_uuid): methods=['GET']) @token_required def download_submission(source_uuid, submission_uuid): - source = get_or_404(Source, source_uuid, column=Source.uuid) + get_or_404(Source, source_uuid, column=Source.uuid) submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) @@ -185,16 +185,16 @@ def download_submission(source_uuid, submission_uuid): submission.downloaded = True db.session.commit() - return utils.serve_file_with_etag(source, submission.filename) + return utils.serve_file_with_etag(submission) @api.route('/sources/<source_uuid>/replies/<reply_uuid>/download', methods=['GET']) @token_required def download_reply(source_uuid, reply_uuid): - source = get_or_404(Source, source_uuid, column=Source.uuid) + get_or_404(Source, source_uuid, column=Source.uuid) reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) - return utils.serve_file_with_etag(source, reply.filename) + return utils.serve_file_with_etag(reply) @api.route('/sources/<source_uuid>/submissions/<submission_uuid>', methods=['GET', 'DELETE']) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - import os from datetime import datetime @@ -8,6 +7,8 @@ from flask_babel import gettext from sqlalchemy.sql.expression import false +import store + from db import db from models import Source, SourceStar, Submission, Reply from journalist_app.forms import ReplyForm @@ -114,6 +115,7 @@ def reply(): try: db.session.add(reply) db.session.commit() + store.async_add_checksum_for_file(reply) except Exception as exc: flash(gettext( "An unexpected error occurred! 
Please " diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -1,15 +1,13 @@ # -*- coding: utf-8 -*- - import binascii + from datetime import datetime from flask import (g, flash, current_app, abort, send_file, redirect, url_for, render_template, Markup, sessions, request) from flask_babel import gettext, ngettext -import hashlib from sqlalchemy.sql.expression import false import i18n -import worker from db import db from models import (get_one_or_else, Source, Journalist, @@ -17,6 +15,8 @@ LoginThrottledException, BadTokenException, SourceStar, PasswordError, Submission) from rm import srm +from store import add_checksum_for_file +from worker import rq_worker_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -174,7 +174,7 @@ def download(zip_basename, submissions): def delete_file(filesystem_id, filename, file_object): file_path = current_app.storage.path(filesystem_id, filename) - worker.enqueue(srm, file_path) + rq_worker_queue.enqueue(srm, file_path) db.session.delete(file_object) db.session.commit() @@ -261,7 +261,7 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions - job = worker.enqueue(srm, current_app.storage.path(filesystem_id)) + job = rq_worker_queue.enqueue(srm, current_app.storage.path(filesystem_id)) # Delete the source's reply keypair current_app.crypto_util.delete_reply_keypair(filesystem_id) @@ -327,16 +327,18 @@ def col_download_all(cols_selected): return download("all", submissions) -def serve_file_with_etag(source, filename): - response = send_file(current_app.storage.path(source.filesystem_id, - filename), +def serve_file_with_etag(db_obj): + file_path = current_app.storage.path(db_obj.source.filesystem_id, db_obj.filename) + response = send_file(file_path, mimetype="application/pgp-encrypted", as_attachment=True, add_etags=False) # Disable Flask default ETag + if not db_obj.checksum: + add_checksum_for_file(db.session, db_obj, file_path) + response.direct_passthrough = False - response.headers['Etag'] = '"sha256:{}"'.format( - hashlib.sha256(response.get_data()).hexdigest()) + response.headers['Etag'] = db_obj.checksum return response diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -164,6 +164,12 @@ class Submission(db.Model): filename = Column(String(255), nullable=False) size = Column(Integer, nullable=False) downloaded = Column(Boolean, default=False) + ''' + The checksum of the encrypted file on disk. + Format: $hash_name:$hex_encoded_hash_value + Example: sha256:05fa5efd7d1b608ac1fbdf19a61a5a439d05b05225e81faa63fdd188296b614a + ''' + checksum = Column(String(255)) def __init__(self, source, filename): self.source_id = source.id @@ -213,6 +219,12 @@ class Reply(db.Model): filename = Column(String(255), nullable=False) size = Column(Integer, nullable=False) + ''' + The checksum of the encrypted file on disk. 
+ Format: $hash_name:$hex_encoded_hash_value + Example: sha256:05fa5efd7d1b608ac1fbdf19a61a5a439d05b05225e81faa63fdd188296b614a + ''' + checksum = Column(String(255)) deleted_by_source = Column(Boolean, default=False, nullable=False) diff --git a/securedrop/rm.py b/securedrop/rm.py --- a/securedrop/rm.py +++ b/securedrop/rm.py @@ -21,4 +21,5 @@ def srm(fn): subprocess.check_call(['srm', '-r', fn]) + # We need to return a non-`None` value so the rq worker writes this back to Redis return "success" diff --git a/securedrop/sdconfig.py b/securedrop/sdconfig.py --- a/securedrop/sdconfig.py +++ b/securedrop/sdconfig.py @@ -160,5 +160,10 @@ def __init__(self): except AttributeError: pass + if getattr(self, 'env', 'prod') == 'test': + self.RQ_WORKER_NAME = 'test' + else: + self.RQ_WORKER_NAME = 'default' + config = SDConfig() # type: SDConfig diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -20,6 +20,7 @@ from source_app.decorators import ignore_static from source_app.utils import logged_in from store import Storage +from worker import rq_worker_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -75,6 +76,9 @@ def create_app(config): gpg_key_dir=config.GPG_KEY_DIR, ) + app.config['RQ_WORKER_NAME'] = config.RQ_WORKER_NAME + rq_worker_queue.init_app(app) + @app.errorhandler(CSRFError) def handle_csrf_error(e): msg = render_template('session_timeout.html') diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -9,6 +9,8 @@ from flask_babel import gettext from sqlalchemy.exc import IntegrityError +import store + from db import db from models import Source, Submission, Reply, get_one_or_else from source_app.decorators import login_required @@ -177,9 +179,11 @@ def submit(): html_contents=html_contents) flash(Markup(msg), "success") + new_submissions = [] for fname in fnames: submission = Submission(g.source, fname) db.session.add(submission) + new_submissions.append(submission) if g.source.pending: g.source.pending = False @@ -203,6 +207,10 @@ def submit(): g.source.last_updated = datetime.utcnow() db.session.commit() + + for sub in new_submissions: + store.async_add_checksum_for_file(sub) + normalize_timestamps(g.filesystem_id) return redirect(url_for('main.lookup')) diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import binascii import gzip import os import re @@ -6,9 +7,13 @@ import zipfile from flask import current_app +from hashlib import sha256 +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker from werkzeug.utils import secure_filename from secure_tempfile import SecureTemporaryFile +from worker import rq_worker_queue VALIDATE_FILENAME = re.compile( @@ -194,3 +199,39 @@ def rename_submission(self, # Only return new filename if successful return new_filename return orig_filename + + +def async_add_checksum_for_file(db_obj): + return rq_worker_queue.enqueue( + queued_add_checksum_for_file, + type(db_obj), + db_obj.id, + current_app.storage.path(db_obj.source.filesystem_id, db_obj.filename), + current_app.config['SQLALCHEMY_DATABASE_URI'], + ) + + +def queued_add_checksum_for_file(db_model, model_id, file_path, db_uri): + # we have to create our own DB session because there is no app context + session = 
sessionmaker(bind=create_engine(db_uri))() + db_obj = session.query(db_model).filter_by(id=model_id).one() + add_checksum_for_file(session, db_obj, file_path) + # We need to return a non-`None` value so the rq worker writes this back to Redis + return "success" + + +def add_checksum_for_file(session, db_obj, file_path): + hasher = sha256() + with open(file_path, 'rb') as f: + while True: + read_bytes = f.read(4096) + if not read_bytes: + break + hasher.update(read_bytes) + + digest = binascii.hexlify(hasher.digest()).decode('utf-8') + digest_str = u'sha256:' + digest + db_obj.checksum = digest_str + + session.add(db_obj) + session.commit() diff --git a/securedrop/worker.py b/securedrop/worker.py --- a/securedrop/worker.py +++ b/securedrop/worker.py @@ -1,14 +1,40 @@ -import os - from redis import Redis from rq import Queue -queue_name = 'test' if os.environ.get( - 'SECUREDROP_ENV') == 'test' else 'default' -# `srm` can take a long time on large files, so allow it run for up to an hour -q = Queue(name=queue_name, connection=Redis(), default_timeout=3600) +class RqWorkerQueue(object): + + ''' + A reference to a `rq` worker queue. + + Configuration: + `RQ_WORKER_NAME`: Name of the `rq` worker. + ''' + + __EXT_NAME = 'rq-worker-queue' + + def __init__(self, app=None): + self.__app = app + if app is not None: + self.init_app(app) + + def init_app(self, app): + self.__app = app + self.__app.config.setdefault('RQ_WORKER_NAME', 'default') + + try: + # check for presence of existing extension dict + self.__app.extensions + except AttributeError: + self.__app.extensions = {} + + queue_name = self.__app.config['RQ_WORKER_NAME'] + queue = Queue(name=queue_name, connection=Redis(), default_timeout=3600) + self.__app.extensions[self.__EXT_NAME] = queue + + def enqueue(self, *nargs, **kwargs): + queue = self.__app.extensions[self.__EXT_NAME] + return queue.enqueue(*nargs, **kwargs) -def enqueue(*args, **kwargs): - return q.enqueue(*args, **kwargs) +rq_worker_queue = RqWorkerQueue()
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -234,9 +234,11 @@ def journalist_api_token(journalist_app, test_journo): def _start_test_rqworker(config): if not psutil.pid_exists(_get_pid_from_file(TEST_WORKER_PIDFILE)): tmp_logfile = io.open('/tmp/test_rqworker.log', 'w') - subprocess.Popen(['rqworker', 'test', + subprocess.Popen(['rqworker', config.RQ_WORKER_NAME, '-P', config.SECUREDROP_ROOT, - '--pid', TEST_WORKER_PIDFILE], + '--pid', TEST_WORKER_PIDFILE, + '--logging_level', 'debug', + '-v'], stdout=tmp_logfile, stderr=subprocess.STDOUT) diff --git a/securedrop/tests/migrations/migration_b58139cfdc8c.py b/securedrop/tests/migrations/migration_b58139cfdc8c.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_b58139cfdc8c.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +import io +import os +import random +import uuid + +from os import path +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_chars, random_datetime + +random.seed('ᕕ( ᐛ )ᕗ') + +DATA = b'wat' +DATA_CHECKSUM = 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + + +class Helper: + + def __init__(self): + self.journalist_id = None + self.source_id = None + self._counter = 0 + + @property + def counter(self): + self._counter += 1 + return self._counter + + def create_journalist(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid.uuid4()), + 'username': random_chars(50), + } + sql = '''INSERT INTO journalists (uuid, username) + VALUES (:uuid, :username) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + def create_source(self): + if self.source_id is not None: + raise RuntimeError('Source already created') + + self.source_filesystem_id = 'aliruhglaiurhgliaurg-{}'.format(self.counter) + params = { + 'filesystem_id': self.source_filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': False, + 'last_updated': random_datetime(nullable=True), + 'pending': False, + 'interaction_count': 0, + } + sql = '''INSERT INTO sources (filesystem_id, uuid, journalist_designation, flagged, + last_updated, pending, interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, :flagged, :last_updated, + :pending, :interaction_count) + ''' + self.source_id = db.engine.execute(text(sql), **params).lastrowid + + def create_submission(self, checksum=False): + filename = str(uuid.uuid4()) + params = { + 'uuid': str(uuid.uuid4()), + 'source_id': self.source_id, + 'filename': filename, + 'size': random.randint(10, 1000), + 'downloaded': False, + + } + + if checksum: + params['checksum'] = \ + 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + sql = '''INSERT INTO submissions (uuid, source_id, filename, size, downloaded, checksum) + VALUES (:uuid, :source_id, :filename, :size, :downloaded, :checksum) + ''' + else: + sql = '''INSERT INTO submissions (uuid, source_id, filename, size, downloaded) + VALUES (:uuid, :source_id, :filename, :size, :downloaded) + ''' + + return (db.engine.execute(text(sql), **params).lastrowid, filename) + + def create_reply(self, checksum=False): + filename = str(uuid.uuid4()) + params = { + 'uuid': str(uuid.uuid4()), + 'source_id': self.source_id, + 'journalist_id': 
self.journalist_id, + 'filename': filename, + 'size': random.randint(10, 1000), + 'deleted_by_source': False, + } + + if checksum: + params['checksum'] = \ + 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + sql = '''INSERT INTO replies (uuid, source_id, journalist_id, filename, size, + deleted_by_source, checksum) + VALUES (:uuid, :source_id, :journalist_id, :filename, :size, + :deleted_by_source, :checksum) + ''' + else: + sql = '''INSERT INTO replies (uuid, source_id, journalist_id, filename, size, + deleted_by_source) + VALUES (:uuid, :source_id, :journalist_id, :filename, :size, + :deleted_by_source) + ''' + return (db.engine.execute(text(sql), **params).lastrowid, filename) + + +class UpgradeTester(Helper): + + def __init__(self, config): + Helper.__init__(self) + self.config = config + self.app = create_app(config) + + def load_data(self): + global DATA + with self.app.app_context(): + self.create_journalist() + self.create_source() + + submission_id, submission_filename = self.create_submission() + reply_id, reply_filename = self.create_reply() + + # we need to actually create files and write data to them so the RQ worker can hash them + for fn in [submission_filename, reply_filename]: + full_path = self.app.storage.path(self.source_filesystem_id, fn) + + dirname = path.dirname(full_path) + if not path.exists(dirname): + os.mkdir(dirname) + + with io.open(full_path, 'wb') as f: + f.write(DATA) + + def check_upgrade(self): + ''' + We cannot inject the `SDConfig` object provided by the fixture `config` into the alembic + subprocess that actually performs the migration. This is needed to get both the value of the + DB URL and access to the function `storage.path`. These values are passed to the `rqworker`, + and without being able to inject this config, the checksum function won't succeed. The above + `load_data` function provides data that can be manually verified by checking the `rqworker` + log file in `/tmp/`. + ''' + pass + + +class DowngradeTester(Helper): + + def __init__(self, config): + Helper.__init__(self) + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + self.create_journalist() + self.create_source() + + # create a submission and a reply that we don't add checksums to + self.create_submission(checksum=False) + self.create_reply(checksum=False) + + # create a submission and a reply that have checksums added + self.create_submission(checksum=True) + self.create_reply(checksum=True) + + def check_downgrade(self): + ''' + Verify that the checksum column is now gone. 
+ ''' + with self.app.app_context(): + sql = "SELECT * FROM submissions" + submissions = db.engine.execute(text(sql)).fetchall() + for submission in submissions: + try: + # this should produce an exception since the column is gone + submission['checksum'] + except NoSuchColumnError: + pass + + sql = "SELECT * FROM replies" + replies = db.engine.execute(text(sql)).fetchall() + for reply in replies: + try: + # this should produce an exception since the column is gone + submission['checksum'] + except NoSuchColumnError: + pass diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import hashlib import json import os import random @@ -541,8 +540,7 @@ def test_authorized_user_can_download_submission(journalist_app, assert response.mimetype == 'application/pgp-encrypted' # Response should have Etag field with hash - assert response.headers['ETag'] == '"sha256:{}"'.format( - hashlib.sha256(response.data).hexdigest()) + assert response.headers['ETag'].startswith('sha256:') def test_authorized_user_can_download_reply(journalist_app, test_files, @@ -562,8 +560,7 @@ def test_authorized_user_can_download_reply(journalist_app, test_files, assert response.mimetype == 'application/pgp-encrypted' # Response should have Etag field with hash - assert response.headers['ETag'] == '"sha256:{}"'.format( - hashlib.sha256(response.data).hexdigest()) + assert response.headers['ETag'].startswith('sha256:') def test_authorized_user_can_get_current_user_endpoint(journalist_app, @@ -866,3 +863,73 @@ def test_malformed_auth_token(journalist_app, journalist_api_token): resp = app.get(url, headers={'Authorization': 'too many {}'.format(journalist_api_token)}) assert resp.status_code == 403 + + +def test_submission_download_generates_checksum(journalist_app, + journalist_api_token, + test_source, + test_submissions, + mocker): + submission = test_submissions['submissions'][0] + assert submission.checksum is None # precondition + + with journalist_app.test_client() as app: + response = app.get(url_for('api.download_submission', + source_uuid=test_source['uuid'], + submission_uuid=submission.uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + assert response.headers['ETag'] + + # check that the submission checksum was added + fetched_submission = Submission.query.get(submission.id) + assert fetched_submission.checksum + + mock_add_checksum = mocker.patch('journalist_app.utils.add_checksum_for_file') + with journalist_app.test_client() as app: + response = app.get(url_for('api.download_submission', + source_uuid=test_source['uuid'], + submission_uuid=submission.uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + assert response.headers['ETag'] + + fetched_submission = Submission.query.get(submission.id) + assert fetched_submission.checksum + # we don't want to recalculat this value + assert not mock_add_checksum.called + + +def test_reply_download_generates_checksum(journalist_app, + journalist_api_token, + test_source, + test_files, + mocker): + reply = test_files['replies'][0] + assert reply.checksum is None # precondition + + with journalist_app.test_client() as app: + response = app.get(url_for('api.download_reply', + source_uuid=test_source['uuid'], + reply_uuid=reply.uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + 
assert response.headers['ETag'] + + # check that the reply checksum was added + fetched_reply = Reply.query.get(reply.id) + assert fetched_reply.checksum + + mock_add_checksum = mocker.patch('journalist_app.utils.add_checksum_for_file') + with journalist_app.test_client() as app: + response = app.get(url_for('api.download_reply', + source_uuid=test_source['uuid'], + reply_uuid=reply.uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + assert response.headers['ETag'] + + fetched_reply = Reply.query.get(reply.id) + assert fetched_reply.checksum + # we don't want to recalculat this value + assert not mock_add_checksum.called diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py --- a/securedrop/tests/test_store.py +++ b/securedrop/tests/test_store.py @@ -9,7 +9,10 @@ os.environ['SECUREDROP_ENV'] = 'test' # noqa from . import utils -from store import Storage +from db import db +from journalist_app import create_app +from models import Submission, Reply +from store import Storage, queued_add_checksum_for_file, async_add_checksum_for_file def create_file_in_source_dir(config, filesystem_id, filename): @@ -163,3 +166,86 @@ def test_rename_submission_with_invalid_filename(journalist_app): # None of the above files exist, so we expect the attempt to rename # the submission to fail and the original filename to be returned. assert original_filename == returned_filename + + [email protected]('db_model', [Submission, Reply]) +def test_add_checksum_for_file(config, db_model): + ''' + Check that when we execute the `add_checksum_for_file` function, the database object is + correctly updated with the actual hash of the file. + + We have to create our own app in order to have more control over the SQLAlchemy sessions. The + fixture pushes a single app context that forces us to work within a single transaction. + ''' + app = create_app(config) + + with app.app_context(): + db.create_all() + source, _ = utils.db_helper.init_source_without_keypair() + target_file_path = app.storage.path(source.filesystem_id, '1-foo-msg.gpg') + test_message = b'hash me!' + expected_hash = 'f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927' + + with open(target_file_path, 'wb') as f: + f.write(test_message) + + if db_model == Submission: + db_obj = Submission(source, target_file_path) + else: + journalist, _ = utils.db_helper.init_journalist() + db_obj = Reply(journalist, source, target_file_path) + + db.session.add(db_obj) + db.session.commit() + db_obj_id = db_obj.id + + queued_add_checksum_for_file(db_model, + db_obj_id, + target_file_path, + app.config['SQLALCHEMY_DATABASE_URI']) + + with app.app_context(): + # requery to get a new object + db_obj = db_model.query.filter_by(id=db_obj_id).one() + assert db_obj.checksum == 'sha256:' + expected_hash + + [email protected]('db_model', [Submission, Reply]) +def test_async_add_checksum_for_file(config, db_model): + ''' + Check that when we execute the `add_checksum_for_file` function, the database object is + correctly updated with the actual hash of the file. + + We have to create our own app in order to have more control over the SQLAlchemy sessions. The + fixture pushes a single app context that forces us to work within a single transaction. + ''' + app = create_app(config) + + with app.app_context(): + db.create_all() + source, _ = utils.db_helper.init_source_without_keypair() + target_file_path = app.storage.path(source.filesystem_id, '1-foo-msg.gpg') + test_message = b'hash me!' 
+ expected_hash = 'f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927' + + with open(target_file_path, 'wb') as f: + f.write(test_message) + + if db_model == Submission: + db_obj = Submission(source, target_file_path) + else: + journalist, _ = utils.db_helper.init_journalist() + db_obj = Reply(journalist, source, target_file_path) + + db.session.add(db_obj) + db.session.commit() + db_obj_id = db_obj.id + + job = async_add_checksum_for_file(db_obj) + + utils.async.wait_for_redis_worker(job, timeout=5) + + with app.app_context(): + # requery to get a new object + db_obj = db_model.query.filter_by(id=db_obj_id).one() + assert db_obj.checksum == 'sha256:' + expected_hash diff --git a/securedrop/tests/utils/async.py b/securedrop/tests/utils/async.py --- a/securedrop/tests/utils/async.py +++ b/securedrop/tests/utils/async.py @@ -4,6 +4,7 @@ """ import time +# This is an arbitarily defined value in the SD codebase and not something from rqworker REDIS_SUCCESS_RETURN_VALUE = 'success'
Etag header set by Journalist API is not sha256sum of file

## Description

The Etag header on file downloads from the Journalist API (https://github.com/freedomofpress/securedrop/blob/develop/securedrop/journalist_app/utils.py#L337) always returns `sha256sum:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855`. This is the sha256sum of an empty string, because `response.get_data()` returns an empty object.

## Steps to Reproduce

1. Upload a file or send a message to the source interface.
2. Set up an admin account and use the Journalist API to retrieve files:
   a. Ensure you aren't using a staging environment, or remove the `Header unset etag` directive from `/etc/apache2/sites-available/journalist.conf` and restart Apache2.
   b. `curl -I <download_url_of_file>` and retain the value of the header `Etag: sha256sum:<SHA256sum goes here>`.
   c. `curl -O <download_url_of_file>` and `sha256sum download`. Observe that the hash is different from the previous step.
   d. `echo -ne "" | sha256sum` and observe that the hash is identical to the Etags above.

## Expected Behavior

1. The Etag value should be the sha256sum of the file.

## Actual Behavior

1. The Etag value is the sha256sum of an empty string.

## Comments

It makes sense that the response is empty, because the file is sent as an attachment: https://github.com/freedomofpress/securedrop/blob/develop/securedrop/journalist_app/utils.py#L333

Since the hash is computed every time a file is downloaded, it might use a significant amount of resources server-side if large files are downloaded at the same time. We should consider hashing the files at creation time and storing the hash values in the database. This will also allow us to verify file integrity (e.g. when restoring backups).
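The patch above implements exactly this: checksums are computed at creation time by a background `rq` job and stored in a new `checksum` column. The heart of it is a streaming SHA-256 digest, roughly as follows (condensed from `store.add_checksum_for_file` in the patch):

```python
from hashlib import sha256


def checksum_for_file(file_path, chunk_size=4096):
    # Stream the file through SHA-256 in fixed-size chunks so large
    # submissions never have to be read into memory all at once.
    hasher = sha256()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            hasher.update(chunk)
    return 'sha256:' + hasher.hexdigest()
```

(The patch spells the hex encoding as `binascii.hexlify(hasher.digest()).decode('utf-8')`, which is equivalent to `hexdigest()` here.)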
2019-04-02T16:28:25Z
[]
[]
freedomofpress/securedrop
4,319
freedomofpress__securedrop-4319
[ "4160" ]
14ddf41f08f41dfc0bf783758f30706e85ff47c0
diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py --- a/securedrop/source_app/info.py +++ b/securedrop/source_app/info.py @@ -7,10 +7,6 @@ def make_blueprint(config): view = Blueprint('info', __name__) - @view.route('/disable-noscript-xss') - def disable_noscript_xss(): - return render_template("disable-noscript-xss.html") - @view.route('/tor2web-warning') def tor2web_warning(): return render_template("tor2web-warning.html")
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -223,9 +223,6 @@ def _source_tor2web_warning(self): def _source_why_journalist_key(self): self.driver.get(self.source_location + "/why-journalist-key") - def _source_disable_noscript_xss(self): - self.driver.get(self.source_location + "/disable-noscript-xss") - def _source_waits_for_session_to_timeout(self, session_length_minutes): time.sleep(session_length_minutes * 60 + 0.1) diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -17,6 +17,3 @@ def test_lookup_codename_hint(self): self._source_chooses_to_login() self._source_proceeds_to_login() self._source_sees_no_codename() - - def test_disable_noscript_xss(self): - self._source_disable_noscript_xss() diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -516,14 +516,6 @@ def test_why_journalist_key(source_app): assert "Why download the journalist's public key?" in text -def test_disable_noscript_xss(source_app): - with source_app.test_client() as app: - resp = app.get(url_for('info.disable_noscript_xss')) - assert resp.status_code == 200 - text = resp.data.decode('utf-8') - assert "<h1>Turn off NoScript's cross-site request sanitization setting</h1>" in text - - def test_metadata_route(source_app): with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: mocked_platform.return_value = ("Ubuntu", "16.04", "xenial")
Remove NoScript workaround instructions from Source Interface

In #4153, we introduced instructions to the Source Interface for working around a NoScript/Firefox issue which causes some uploads to fail. The upstream NoScript issue (closed, as a Firefox issue is being pinpointed as the root cause) is https://github.com/hackademix/noscript/issues/64; the best candidate for the underlying Firefox issue appears to be https://bugzilla.mozilla.org/show_bug.cgi?id=1506562.

These instructions are cumbersome, and the upstream issue will hopefully be resolved ASAP. We'll need to continue to monitor upstream and remove the instructions once a fix has landed in the latest releases of the Tor Browser Bundle & Tails.
I've filed https://bugzilla.mozilla.org/show_bug.cgi?id=1532530 for additional follow-up.

Is this Issue more of a "to do" for once the issue has been fixed on the Mozilla/NS end?

Yes, exactly. Hence the "blocked" label, which I'll remove once the upstream issue is fixed and the fix can be expected to reach most Tor browser users by the time of the next SecureDrop release milestone.

See the tbb-dev thread [Firefox/NoScript bug with major downstream effects](https://lists.torproject.org/pipermail/tbb-dev/2019-March/thread.html#970) for discussion with Tor browser developers, as well.

Upstream Tor browser issue proposing to change the default, & discussion: https://trac.torproject.org/projects/tor/ticket/29733

The NoScript fix is included in Tor Browser 8.0.8 (released March 21), so we should be able to revert the warning, potentially for 0.12.2, pending QA: https://blog.torproject.org/new-release-tor-browser-808
2019-04-03T00:11:43Z
[]
[]
freedomofpress/securedrop
4,325
freedomofpress__securedrop-4325
[ "4305" ]
49400d004c404834895b33e22d257c2da1a314b5
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -16,7 +16,7 @@ from db import db from models import Source from request_that_secures_file_uploads import RequestThatSecuresFileUploads -from source_app import main, info, api +from source_app import main, info, api, disable from source_app.decorators import ignore_static from source_app.utils import logged_in from store import Storage @@ -106,6 +106,18 @@ def handle_csrf_error(e): for module in [main, info, api]: app.register_blueprint(module.make_blueprint(config)) # type: ignore + @app.before_request + @ignore_static + def setup_i18n(): + """Store i18n-related values in Flask's special g object""" + g.locale = i18n.get_locale(config) + g.text_direction = i18n.get_text_direction(g.locale) + g.html_lang = i18n.locale_to_rfc_5646(g.locale) + g.locales = i18n.get_locale2name() + + # Disables the app if the server is running Trusty past its EOL date. + disable.disable_app(app) + @app.before_request @ignore_static def check_tor2web(): @@ -125,10 +137,6 @@ def check_tor2web(): @ignore_static def setup_g(): """Store commonly used values in Flask's special g object""" - g.locale = i18n.get_locale(config) - g.text_direction = i18n.get_text_direction(g.locale) - g.html_lang = i18n.locale_to_rfc_5646(g.locale) - g.locales = i18n.get_locale2name() if 'expires' in session and datetime.utcnow() >= session['expires']: msg = render_template('session_timeout.html') diff --git a/securedrop/source_app/decorators.py b/securedrop/source_app/decorators.py --- a/securedrop/source_app/decorators.py +++ b/securedrop/source_app/decorators.py @@ -18,7 +18,7 @@ def ignore_static(f): a static resource.""" @wraps(f) def decorated_function(*args, **kwargs): - if request.path.startswith('/static'): + if request.path.startswith('/static') or request.path == '/org-logo': return # don't execute the decorated function return f(*args, **kwargs) return decorated_function diff --git a/securedrop/source_app/disable.py b/securedrop/source_app/disable.py new file mode 100644 --- /dev/null +++ b/securedrop/source_app/disable.py @@ -0,0 +1,17 @@ +from datetime import date +from flask import render_template +from source_app.decorators import ignore_static +import platform + +XENIAL_VER = "16.04" +TRUSTY_DISABLE_DATE = date(2019, 4, 30) + + +def disable_app(app): + + @app.before_request + @ignore_static + def disable_ui(): + if(platform.linux_distribution()[1] != XENIAL_VER and + date.today() > TRUSTY_DISABLE_DATE): + return render_template('disabled.html')
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import gzip import json +import platform import re import subprocess @@ -13,14 +14,21 @@ import utils import version +from datetime import date from db import db from models import Source, Reply from source_app import main as source_app_main from source_app import api as source_app_api +from source_app import disable as source_app_disable from utils.db_helper import new_codename from utils.instrument import InstrumentedApp overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1) +TRUSTY_DISABLED_ENDPOINTS = ['main.index', 'main.lookup', 'main.generate', 'main.login', + 'info.download_journalist_pubkey', 'info.tor2web_warning', + 'info.recommend_tor_browser', 'info.why_download_journalist_pubkey'] +STATIC_ASSETS = ['css/source.css', 'i/custom_logo.png', 'i/font-awesome/fa-globe-black.png', + 'i/favicon.png'] def test_page_not_found(source_app): @@ -696,3 +704,53 @@ def test_source_can_only_delete_own_replies(source_app): reply = Reply.query.filter_by(filename=filename).one() assert reply.deleted_by_source + + +def test_source_disabled_when_trusty_is_eol(config, source_app): + with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: + mocked_platform.return_value = ("Ubuntu", "14.04", "Trusty") + + with source_app.test_client() as app: + source_app_disable.TRUSTY_DISABLE_DATE = date(2001, 1, 1) + + assert platform.linux_distribution()[1] == "14.04" + for endpoint in TRUSTY_DISABLED_ENDPOINTS: + resp = app.get(url_for(endpoint)) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "We're sorry, our SecureDrop is currently offline." in text + # Ensure static assets are properly served + for asset in STATIC_ASSETS: + resp = app.get(url_for('static', filename=asset)) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "We're sorry, our SecureDrop is currently offline." not in text + + +def test_source_not_disabled_before_trusty_eol(config, source_app): + with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: + mocked_platform.return_value = ("Ubuntu", "14.04", "Trusty") + + with source_app.test_client() as app: + source_app_disable.TRUSTY_DISABLE_DATE = date(2097, 1, 1) + assert platform.linux_distribution()[1] == "14.04" + for endpoint in TRUSTY_DISABLED_ENDPOINTS: + resp = app.get(url_for(endpoint), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "We're sorry, our SecureDrop is currently offline." not in text + + +def test_source_not_disabled_xenial(config, source_app): + with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: + mocked_platform.return_value = ("Ubuntu", "16.04", "Xenial") + + with source_app.test_client() as app: + source_app_disable.TRUSTY_DISABLE_DATE = date(2001, 1, 1) + + assert platform.linux_distribution()[1] == "16.04" + for endpoint in TRUSTY_DISABLED_ENDPOINTS: + resp = app.get(url_for(endpoint), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "We're sorry, our SecureDrop is currently offline." not in text
Disable Source Interface on instances running Trusty after April 30

> **NOTE: If you are an administrator and would like to receive help with the upgrade from Trusty to Xenial, please contact us via [email protected] ([GPG encrypted](https://securedrop.org/sites/default/files/fpf-email.asc)), or open an issue in our support portal if you are a member.**

As [previously communicated](https://securedrop.org/news/advisory-why-you-must-manually-upgrade-your-securedrop-servers-april-30/), news organizations must upgrade to Ubuntu 16.04 by April 30, as Ubuntu 14.04 reaches EOL on that date. The Journalist Interface of impacted instances has [displayed a warning](https://github.com/freedomofpress/securedrop/issues/4027) since March 4, and extensive communications continue through all available channels.

In our previous advisory, we have indicated that we will remove Trusty instances from the [SecureDrop directory](https://securedrop.org/directory/) after April 30, and that "we cannot guarantee that your SecureDrop instance will continue to function after that date." Running an unsupported operating system on the servers presents an unacceptable risk to sources and newsrooms alike.

For this reason, after April 30, news organizations running SecureDrop on Trusty should no longer be able to communicate with sources until they upgrade. The Source Interface should show a neutral message similar to the following (final language TBD):

> This SecureDrop instance is currently unavailable, pending a server upgrade. Please check back again later.

All source interface functionality should be disabled. The journalist interface should continue to work as before. We should also plan additional comms through all available channels in early April (tracked internally).

## User Stories

As a source, I want to have confidence that all my interactions via SecureDrop meet the highest standard of information security, so I face minimal risks of deanonymization by an adversary.

As a news organization, I want my SecureDrop instance to be protected against external compromise at all times, so that existing submissions and correspondence are not exposed to adversaries.

## Acceptance Criteria

**Given** that my SecureDrop servers are running Trusty after April 30
**When** I connect to the server's Source Interface
**Then** I should see a maintenance notice
**And** all other Source Interface functionality should be disabled

**Given** that my SecureDrop servers were running Trusty after April 30, but have since been upgraded to Xenial following [instructions](https://securedrop.org/xenial-upgrade)
**When** I connect to the server's Source Interface
**Then** the Source Interface should operate normally
**And** no warning message should be displayed
Pinging self to get on my radar... likely just messaging (or, time for a "sailed-ship" icon, heh)?

Follow-up from the discussion during sprint planning: this is a minimal implementation that would achieve this functionality.

```python
# file `disable.py`
from datetime import date

from flask import send_file

DISABLE_DATE = date(2019, 4, 30)


def disable_app(app):
    @app.before_request
    def disable_ui():
        if date.today() > DISABLE_DATE:
            return send_file('path/to/static/file')
```

Then we add the following lines to `{source,journalist}_app/__init__.py`:

```python
import disable


def create_app(config):
    # snip
    # this must occur before any other `before_request` decoration or extensions
    disable.disable_app(app)
    # snip
```

This works because if a `before_request` callback returns `None`, the request falls through to the next `before_request` callback before eventually hitting the endpoints.

Thanks @heartsucker, that solution looks good for the near term. I agree that the Flask method is the most prudent in the near term, but we must also be prepared to disable the source interface at either the Apache or THS level if there's a remotely exploitable Tor, Apache, Python or Flask vulnerability in the future. Perhaps worth tracking in a follow-up ticket once a PR is opened.

WIP branch here: https://github.com/freedomofpress/securedrop/tree/4305-disable-submissions-trusty-eol

The current UX is very simple, and is identical to the current Source Interface generic error page:

![SecureDrop | Protecting Journalists and Sources - Mozilla Firefox_002](https://user-images.githubusercontent.com/15223328/55743491-702ce100-5a00-11e9-912c-426c96adc960.png)

I'd assumed we were seeking to block both submitting _and_ messaging. I'd prefer to be clearer with users, and had composed the below, based off the current Index page (though I moved the languages widget to below the logo, because I'd prefer that gets moved on the Index pages anyway):

![image](https://user-images.githubusercontent.com/8262612/55745600-7913b780-59ec-11e9-946b-47fc8cecadfd.png)

[Inline SVG markup for the badge in the mock-up omitted.]
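To make the `before_request` fall-through described above concrete, here is a self-contained toy app (illustrative only, not SecureDrop code; the route and message are made up):

```python
from flask import Flask

app = Flask(__name__)


@app.before_request
def checks_pass():
    return None  # returning None lets the request continue


@app.before_request
def short_circuit():
    # A non-None return value is used as the response immediately,
    # skipping any remaining before_request callbacks and the view.
    return 'SecureDrop is offline', 503


@app.route('/')
def index():
    return 'never reached'
```

The first callback returns `None`, so Flask moves on; the second returns a response, so the view function is never invoked.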
Related general nit: preference to avoid use of nrrdspeak like "instance" and "mode" in user-facing content on the Source UI. Yes, most folks can figure those things out, but they impose an emotional burden onto users I'd like to be sensitive to avoid. Sources are already taking quite a leap by engaging with a SecureDrop at all. <3

Thanks @ninavizz, great points. Yes, submissions *and* replies will be disabled. I've updated the wording based on your feedback; what do you think?

![sorry](https://user-images.githubusercontent.com/15223328/55747194-b76b9f80-5a09-11e9-8713-19d2d3ecf659.png)

The rationale is that I would prefer to keep the messaging as generic as possible: since sources are likely to originate from the org's landing page, they can choose to submit documents through other means (if they choose and if available). Does this make sense?

@emkll It makes sense, but I still am not comfortable with the terseness of that. It's very important to point users towards a path for probable success when effectively slamming a door in their face (which this kinda is).

@ninavizz, thanks for your helpful user-centric input here. We have to be careful communicating on another organization's behalf, even if that does make the message a bit more technical or opaque. It is ultimately the news organization's job to clarify the status of their SecureDrop on their landing page, and we cannot make judgments about this on their behalf or offer too much in the way of resolution. Even a statement like "We don't know when _our_ SecureDrop will be back online" IMO presupposes too much about the other organization. We should not speak on their behalf in this manner. Similarly, we cannot say with certainty that other contact methods are available on the landing page, or that the organization is currently prepared to process submissions by other means. I would therefore suggest sticking with simple language, e.g.:

> **We're sorry, this SecureDrop is currently offline.**
> Please try again later. Additional information may be available on the organization's website.

(Note the avoidance of "our".)

@eloquence I hear your and @emkll's concerns.

> Please try again later

This feeds into a common mental model that other online services have established in most users: that later the same day, or in a few days, might work. We, however, know that is unlikely to work.

> ...on the organization's website...

Again, working with user mental models here: that makes this look like a phishing page, or like they're on the wrong page, especially for orgs that don't use a custom logo and instead use the SecureDrop logo. Why would any org not speak to its users in the first person? It's a standard expectation, and with commercial integrations, 3rd parties do it all the time in their generic/default copy.

It's not possible, with any software solution, to 100% appease the needs of the customer, the end user, and larger behavioral interests. So tradeoffs have to be made.
Are we out to protect the users first, or to represent all organizations as neutrally as absolutely possible, first? > We don't know when... IMHO is appropriately vague for messaging users. Bottom line: we've given orgs over a month to update their systems. If they're neither communicating with FPF nor with their own users, why are we sticking our necks out for them at the expense of user expectations management, in this messaging? I feel it's entirely fair if an org gets up in a huff, to say just that: we gave y'all a chance, we went out of our way, and we needed to shift our prioritization to the users. ``` We’re sorry. Our SecureDrop is offline right now. We don’t know when our SecureDrop will be live again. We encourage you to consider other secure means of communication with us, while our SecureDrop is unavailable. ``` If an org has in fact not yet upgraded to Xenial by now, what is the likelihood that they ever will—if they've also ignored all our outreach? That is also a very real thing to consider, that I want to respect Source expectations around. Regardless of news orgs ignoring the need to upgrade, I don't think it's appropriate for FPF to "step in" and communicate on their behalf as "We" on the source interface index page, even in vague terms like "We don't know when our SecureDrop will be live again", or in terms of giving specific recommendations to sources. But I also need to reiterate that this is quite time-sensitive, so one way or another we should finalize the wording ASAP. @redshiftzero could you act as a tie-breaker on this wording question? I would be ok with killing the "We" to get more neutral with `It is not known when our SecureDrop will be... `but I feel it's imperative for news orgs to always reference their instance in the **_our._** In fact, I feel that should be governed as a requirement (at least to get listed on the Directory) in the docs, too. Eventually, with using a logo to TBD specifications that presents the instance as _theirs_. Who owns the servers, and SecureDrop not being this global monolith thing, is a very important concept to communicate to _Sources_. This is the Customer's SecureDrop. We're irresponsible to not speak directly to that. I'd suggest the following compromise language: > **We're sorry, our SecureDrop is currently offline.** > Please try again later, and check our website for more information. I still think "try again later" is fine; it could indeed be days (this change may motivate organizations to upgrade). Beyond that I don't think we should make statements on the org's behalf. I'm OK with "our" for statements that are unproblematic and unlikely to contradict the news org's other public comms. But consider: - Some orgs ask to exclusively be contacted via SecureDrop; - Some orgs may update their landing page with maintenance dates etc. In those cases we should not contradict what the org is saying by making presumptions on their behalf like "We don't know when" or "consider X". In future, if the source interface becomes more customizable, we may be able to offer richer templates, but given that there's no org preview or sign-off involved in rolling out this change, nor any customizability, I would argue for conservatism whenever we speak _as_ the news organization to the user. That said, I'm fine with whatever final language Jen signs off on. Ok—fair point on the "other means to communicate with us..." bit. 
Only remaining nit, then, is structural—and a general UI copywriting best-practices nit I have across SD documentation, Source UI, Journalist UI... everywhere. Sentence conjunctions are good for prose, and they make articles interesting to read. They make instructional text more difficult to parse, though. Guiding behavior, not ideas, is the purpose of instructional text. I personally cannot compose sentences without conjunctions, but for messaging users it's important to break apart conjunctions into stand-alone thoughts/sentences. No commas. Small, simple sentences. Simple words. KISS: Keep It Simple, Stupid. > > We're sorry, our SecureDrop is currently offline. > Please check our website for more information. ...and then my jury is out on how to fit in the "try again later" part. As a total aside from my hesitance to include it at all. I know, I used a comma after "We're sorry," above. A rare exception to keep the tone friendly. Thank you for working this through with me, Erik. And Jen, I trust this in your no-pressure-at-all hands. 😸 OK how about we go with: > We're sorry, our SecureDrop is currently offline. Please try again later. Check our website for more information. Despite the fact there's no guarantee the organization has written anything about their SecureDrop downtime on their website, it at least provides a pointer for finding other contact methods. There's only so much we can do here; I agree we should only be making very uncontroversial statements, since we are speaking from the perspective of the news organization.
2019-04-08T19:26:49Z
[]
[]
freedomofpress/securedrop
4,346
freedomofpress__securedrop-4346
[ "4350" ]
4410a6cc8804a5c0a91a6abbc9640ba525196a12
diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py --- a/install_files/ansible-base/callback_plugins/ansible_version_check.py +++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py @@ -21,7 +21,7 @@ class CallbackModule(CallbackBase): def __init__(self): # Can't use `on_X` because this isn't forwards compatible # with Ansible 2.0+ - required_version = '2.6.8' # Keep synchronized with requirements files + required_version = '2.6.14' # Keep synchronized with requirements files if not ansible.__version__.startswith(required_version): print_red_bold( "SecureDrop restriction: only Ansible {version}.*"
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1148,10 +1148,10 @@ def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): log_event = mocked_error_logger.call_args[0][0] if six.PY2: assert ("Adding user 'username' failed: (__builtin__.NoneType) " - "None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']") in log_event + "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event else: assert ("Adding user 'username' failed: (builtins.NoneType) " - "None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']") in log_event + "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin):
Update SQLAlchemy to 1.3.0 ## Description Multiple vulnerabilities were reported in SQLAlchemy <1.3.0: - CVE-2019-7164 (moderate): SQLAlchemy through 1.2.17 and 1.3.x through 1.3.0b2 allows SQL Injection via the order_by parameter. - CVE-2019-7548 (moderate): SQLAlchemy 1.2.17 has SQL Injection when the group_by parameter can be controlled. We should update this dependency ASAP for the SecureDrop application.
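To illustrate the vulnerability class, here is a hedged sketch (not SecureDrop code; the model, the in-memory engine, and the `untrusted` value are hypothetical):

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# In SQLAlchemy < 1.3, a plain string reaching order_by()/group_by() could be
# coerced into a raw SQL fragment, so request-controlled values could inject
# SQL. 1.3.0 removes that automatic coercion.
untrusted = "name"  # imagine this arrived as a query-string parameter
session.query(User).order_by(untrusted).all()  # risky on SQLAlchemy < 1.3

# Safe on any version: resolve user input against an explicit allow-list.
allowed = {"id": User.id, "name": User.name}
session.query(User).order_by(allowed[untrusted]).all()
```

The remediation itself is simply bumping the pinned version to 1.3.0 or later in the application's requirements files, as the issue requests.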
2019-04-15T19:07:04Z
[]
[]
freedomofpress/securedrop
4,349
freedomofpress__securedrop-4349
[ "3933" ]
839415f2db9439e7b0c2a827b713da54ace28204
diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py similarity index 86% rename from securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py rename to securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py --- a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns.py +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py @@ -1,4 +1,4 @@ -"""add checksum columns +"""add checksum columns and revoke token table Revision ID: b58139cfdc8c Revises: f2833ac34bb6 @@ -36,6 +36,16 @@ def upgrade(): with op.batch_alter_table('submissions', schema=None) as batch_op: batch_op.add_column(sa.Column('checksum', sa.String(length=255), nullable=True)) + op.create_table( + 'revoked_tokens', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('journalist_id', sa.Integer(), nullable=True), + sa.Column('token', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('token') + ) + try: app = create_app(config) @@ -77,6 +87,8 @@ def upgrade(): def downgrade(): + op.drop_table('revoked_tokens') + with op.batch_alter_table('submissions', schema=None) as batch_op: batch_op.drop_column('checksum') diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -18,7 +18,8 @@ from db import db from journalist_app import account, admin, api, main, col from journalist_app.utils import (get_source, logged_in, - JournalistInterfaceSessionInterface) + JournalistInterfaceSessionInterface, + cleanup_expired_revoked_tokens) from models import Journalist from store import Storage from worker import rq_worker_queue @@ -124,6 +125,10 @@ def _handle_http_exception(error): template_filters.rel_datetime_format app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat + @app.before_first_request + def expire_blacklisted_tokens(): + return cleanup_expired_revoked_tokens() + @app.before_request def setup_g(): """Store commonly used values in Flask's special g object""" diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -10,7 +10,7 @@ from db import db from journalist_app import utils -from models import (Journalist, Reply, Source, Submission, +from models import (Journalist, Reply, Source, Submission, RevokedToken, LoginThrottledException, InvalidUsernameException, BadTokenException, WrongPasswordException) from store import NotEncrypted @@ -75,10 +75,18 @@ def get_endpoints(): @api.before_request def validate_data(): if request.method == 'POST': - # flag and star can have empty payloads + # flag, star, and logout can have empty payloads if not request.data: - if ('flag' not in request.path and 'star' not in request.path): - return abort(400, 'malformed request') + dataless_endpoints = [ + 'add_star', + 'remove_star', + 'flag', + 'logout', + ] + for endpoint in dataless_endpoints: + if request.endpoint == 'api.' 
+ endpoint: + return + return abort(400, 'malformed request') # other requests must have valid JSON payload else: try: @@ -309,6 +317,16 @@ def get_current_user(): user = get_user_object(request) return jsonify(user.to_json()), 200 + @api.route('/logout', methods=['POST']) + @token_required + def logout(): + user = get_user_object(request) + auth_token = request.headers.get('Authorization').split(" ")[1] + revoked_token = RevokedToken(token=auth_token, journalist_id=user.id) + db.session.add(revoked_token) + db.session.commit() + return jsonify({'message': 'Your token has been revoked.'}), 200 + def _handle_api_http_exception(error): # Workaround for no blueprint-level 404/5 error handlers, see: # https://github.com/pallets/flask/issues/503#issuecomment-71383286 diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -13,7 +13,7 @@ from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, WrongPasswordException, LoginThrottledException, BadTokenException, SourceStar, - PasswordError, Submission) + PasswordError, Submission, RevokedToken) from rm import srm from store import add_checksum_for_file from worker import rq_worker_queue @@ -353,3 +353,18 @@ def save_session(self, app, session, response): else: super(JournalistInterfaceSessionInterface, self).save_session( app, session, response) + + +def cleanup_expired_revoked_tokens(): + """Remove tokens that have now expired from the revoked token table.""" + + revoked_tokens = db.session.query(RevokedToken).all() + + for revoked_token in revoked_tokens: + if Journalist.validate_token_is_not_expired_or_invalid(revoked_token.token): + pass # The token has not expired, we must keep in the revoked token table. + else: + # The token is no longer valid, remove from the revoked token table. + db.session.delete(revoked_token) + + db.session.commit() diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -567,6 +567,16 @@ def generate_api_token(self, expiration): current_app.config['SECRET_KEY'], expires_in=expiration) return s.dumps({'id': self.id}).decode('ascii') + @staticmethod + def validate_token_is_not_expired_or_invalid(token): + s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY']) + try: + s.loads(token) + except BadData: + return None + + return True + @staticmethod def validate_api_token_and_get_user(token): s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY']) @@ -574,6 +584,11 @@ def validate_api_token_and_get_user(token): data = s.loads(token) except BadData: return None + + revoked_token = RevokedToken.query.filter_by(token=token).one_or_none() + if revoked_token is not None: + return None + return Journalist.query.get(data['id']) def to_json(self): @@ -598,3 +613,16 @@ class JournalistLoginAttempt(db.Model): def __init__(self, journalist): self.journalist_id = journalist.id + + +class RevokedToken(db.Model): + + """ + API tokens that have been revoked either through a logout or other revocation mechanism. + """ + + __tablename__ = 'revoked_tokens' + + id = Column(Integer, primary_key=True) + journalist_id = Column(Integer, ForeignKey('journalists.id')) + token = db.Column(db.Text, nullable=False, unique=True)
diff --git a/securedrop/tests/migrations/migration_b58139cfdc8c.py b/securedrop/tests/migrations/migration_b58139cfdc8c.py --- a/securedrop/tests/migrations/migration_b58139cfdc8c.py +++ b/securedrop/tests/migrations/migration_b58139cfdc8c.py @@ -151,6 +151,8 @@ def check_upgrade(self): and without being able to inject this config, the checksum function won't succeed. The above `load_data` function provides data that can be manually verified by checking the `rqworker` log file in `/tmp/`. + + The other part of the migration, creating a table, cannot be tested regardless. ''' pass @@ -175,9 +177,15 @@ def load_data(self): self.create_submission(checksum=True) self.create_reply(checksum=True) + # add a revoked token for enable a foreign key connection + self.add_revoked_token() + def check_downgrade(self): ''' Verify that the checksum column is now gone. + + The dropping of the revoked_tokens table cannot be checked. If the migration completes, + then it wokred correctly. ''' with self.app.app_context(): sql = "SELECT * FROM submissions" @@ -197,3 +205,13 @@ def check_downgrade(self): submission['checksum'] except NoSuchColumnError: pass + + def add_revoked_token(self): + params = { + 'journalist_id': self.journalist_id, + 'token': 'abc123', + } + sql = '''INSERT INTO revoked_tokens (journalist_id, token) + VALUES (:journalist_id, :token) + ''' + db.engine.execute(text(sql), **params) diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py --- a/securedrop/tests/test_i18n.py +++ b/securedrop/tests/test_i18n.py @@ -25,6 +25,7 @@ os.environ['SECUREDROP_ENV'] = 'test' # noqa from sdconfig import SDConfig +from db import db import i18n import i18n_tool import journalist_app as journalist_app_module @@ -217,6 +218,8 @@ def test_i18n(journalist_app, config): # grabs values at init time and we can't inject them later. 
for app in (journalist_app_module.create_app(fake_config), source_app.create_app(fake_config)): + with app.app_context(): + db.create_all() assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES verify_i18n(app) diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -10,7 +10,7 @@ from itsdangerous import TimedJSONWebSignatureSerializer from db import db -from models import Journalist, Reply, Source, SourceStar, Submission +from models import Journalist, Reply, Source, SourceStar, Submission, RevokedToken os.environ['SECUREDROP_ENV'] = 'test' # noqa from .utils.api_helper import get_api_headers @@ -933,3 +933,20 @@ def test_reply_download_generates_checksum(journalist_app, assert fetched_reply.checksum # we don't want to recalculat this value assert not mock_add_checksum.called + + +def test_revoke_token(journalist_app, test_journo, journalist_api_token): + with journalist_app.test_client() as app: + # without token 403's + resp = app.post(url_for('api.logout')) + assert resp.status_code == 403 + + resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 200 + + revoked_token = RevokedToken.query.filter_by(token=journalist_api_token).one() + assert revoked_token.journalist_id == test_journo['id'] + + resp = app.get(url_for('api.get_all_sources'), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 403 diff --git a/securedrop/tests/test_journalist_utils.py b/securedrop/tests/test_journalist_utils.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_journalist_utils.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +from flask import url_for +import os +import pytest +import random + +from models import RevokedToken +from sqlalchemy.orm.exc import NoResultFound + +from journalist_app.utils import cleanup_expired_revoked_tokens + +os.environ['SECUREDROP_ENV'] = 'test' # noqa +from .utils.api_helper import get_api_headers + +random.seed('◔ ⌣ ◔') + + +def test_revoke_token_cleanup_does_not_delete_tokens_if_not_expired(journalist_app, test_journo, + journalist_api_token): + with journalist_app.test_client() as app: + resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 200 + + cleanup_expired_revoked_tokens() + + revoked_token = RevokedToken.query.filter_by(token=journalist_api_token).one() + assert revoked_token.journalist_id == test_journo['id'] + + +def test_revoke_token_cleanup_does_deletes_tokens_that_are_expired(journalist_app, test_journo, + journalist_api_token, mocker): + with journalist_app.test_client() as app: + resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 200 + + # Mock response from expired token method when token is expired + mocker.patch('journalist_app.admin.Journalist.validate_token_is_not_expired_or_invalid', + return_value=None) + cleanup_expired_revoked_tokens() + + with pytest.raises(NoResultFound): + RevokedToken.query.filter_by(token=journalist_api_token).one() diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py --- a/securedrop/tests/test_template_filters.py +++ b/securedrop/tests/test_template_filters.py @@ -5,6 +5,7 @@ from flask import session os.environ['SECUREDROP_ENV'] = 'test' # noqa +from db import db import i18n import i18n_tool import journalist_app @@ -110,6 
+111,8 @@ def do_test(config, create_app): pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', l) app = create_app(config) + with app.app_context(): + db.create_all() assert i18n.LOCALES == config.SUPPORTED_LOCALES verify_filesizeformat(app)
Support API token blacklisting and add API /logout endpoint ## Description We should: * store a table of blacklisted API tokens server-side * add a /logout endpoint to add API tokens to the blacklist * ensure that tokens used to access token_required endpoints are not in the blacklist, else we 403
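From the client's perspective, the flow would look roughly like this (a sketch with a hypothetical base URL and token value; the `Token` header scheme and the `/logout` and `/sources` routes follow the patch above):

```python
import requests

API = "http://localhost:8081/api/v1"  # hypothetical journalist API address
token = "example-token-value"         # as returned by the token endpoint

headers = {
    "Authorization": "Token {}".format(token),
    "Accept": "application/json",
}

# POST /logout may carry an empty body; it adds this token to the blacklist.
resp = requests.post(API + "/logout", headers=headers)
assert resp.status_code == 200

# Every token_required endpoint now rejects the revoked token with a 403.
resp = requests.get(API + "/sources", headers=headers)
assert resp.status_code == 403
```

Note the design choice in the patch of pruning expired tokens from the blacklist on the first request after startup (`before_first_request`), which keeps the revoked-token table from growing without bound.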
2019-04-16T12:42:55Z
[]
[]
freedomofpress/securedrop
4,365
freedomofpress__securedrop-4365
[ "4294" ]
7765fde1f94b92615e75345a18c1399202f958d2
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -37,6 +37,23 @@ 'BCDEFGHIJKLMNOPQRSTUVWXYZ') +def monkey_patch_delete_handle_status(self, key, value): + """Parse a status code from the attached GnuPG process. + :raises: :exc:`~exceptions.ValueError` if the status message is unknown. + """ + if key in ("DELETE_PROBLEM", "KEY_CONSIDERED"): + self.status = self.problem_reason.get(value, "Unknown error: %r" + % value) + elif key in ("PINENTRY_LAUNCHED"): + self.status = key.replace("_", " ").lower() + else: + raise ValueError("Unknown status message: %r" % key) + + +# Monkey patching to resolve https://github.com/freedomofpress/securedrop/issues/4294 +gnupg._parsers.DeleteResult._handle_status = monkey_patch_delete_handle_status + + class CryptoException(Exception): pass
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -268,6 +268,24 @@ def test_delete_reply_keypair(source_app, test_source): assert source_app.crypto_util.getkey(fid) is None +def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source, + mocker, capsys): + """ + Regression test for https://github.com/freedomofpress/securedrop/issues/4294 + """ + fid = test_source['filesystem_id'] + + # Patch private python-gnupg method to reproduce the issue in #4294 + mocker.patch('pretty_bad_protocol._util._separate_keyword', + return_value=('PINENTRY_LAUNCHED', 'does not matter')) + + source_app.crypto_util.delete_reply_keypair(fid) + + captured = capsys.readouterr() + assert "ValueError: Unknown status message: 'PINENTRY_LAUNCHED'" not in captured.err + assert source_app.crypto_util.getkey(fid) is None + + def test_delete_reply_keypair_no_key(source_app): """No exceptions should be raised when provided a filesystem id that does not exist.
GPG PINENTRY_LAUNCHED error when sources are deleted ## Description Deleting a source raises this error. ```pytb Exception in thread Thread-39: Traceback (most recent call last): File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner self.run() File "/usr/lib/python2.7/threading.py", line 754, in run self.__target(*self.__args, **self.__kwargs) File "/usr/local/lib/python2.7/dist-packages/pretty_bad_protocol/_meta.py", line 670, in _read_response result._handle_status(keyword, value) File "/usr/local/lib/python2.7/dist-packages/pretty_bad_protocol/_parsers.py", line 1024, in _handle_status raise ValueError("Unknown status message: %r" % key) ValueError: Unknown status message: u'PINENTRY_LAUNCHED' ``` ## Steps to Reproduce - Start container - Start client - Delete source ## Expected Behavior No errors. ## Actual Behavior There are errors but the HTTP code is still 200, and the key is still deleted on the server side.
I can see the same errors. Background: I made this change 62bd71a97d10a6d4aac4c20bbb1b71fac12ddb28 such that secret keys would be properly deleted on Xenial (due to [upstream issues](https://dev.gnupg.org/T3465) with `--pinentry-mode loopback` and `--delete-secret-keys`)
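The parser failure can be reproduced without touching a keyring by driving the status handler directly. A minimal sketch; it assumes `DeleteResult(None)` is constructible (i.e. the constructor only stores the gpg handle) and the status payload string is illustrative:

```python
from pretty_bad_protocol import _parsers

# gpg2 on Xenial emits a PINENTRY_LAUNCHED status line during
# --delete-secret-keys; DeleteResult._handle_status() raises ValueError for
# any status keyword it does not recognize.
result = _parsers.DeleteResult(None)
try:
    result._handle_status("PINENTRY_LAUNCHED", "9 curses 1.1.1 - - -")
except ValueError as exc:
    print(exc)  # Unknown status message: 'PINENTRY_LAUNCHED'
```

The shipped fix (see the patch above) monkey-patches `_handle_status` to treat `PINENTRY_LAUNCHED` as a benign status instead of raising.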
2019-04-24T00:03:16Z
[]
[]
freedomofpress/securedrop
4,391
freedomofpress__securedrop-4391
[ "4361" ]
c24f61f7408b7e8f412d14ea3eab441178171938
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -67,6 +67,13 @@ def create(): # Issue 2386: don't log in on duplicates del session['codename'] + + # Issue 4361: Delete 'logged_in' if it's in the session + try: + del session['logged_in'] + except KeyError: + pass + abort(500) else: os.mkdir(current_app.storage.path(filesystem_id))
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -146,7 +146,31 @@ def test_generate_too_long_codename(source_app): ) -def test_create_duplicate_codename(source_app): +def test_create_duplicate_codename_logged_in_not_in_session(source_app): + with patch.object(source.app.logger, 'error') as logger: + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + + # Create a source the first time + resp = app.post(url_for('main.create'), follow_redirects=True) + assert resp.status_code == 200 + codename = session['codename'] + + with source_app.test_client() as app: + # Attempt to add the same source + with app.session_transaction() as sess: + sess['codename'] = codename + resp = app.post(url_for('main.create'), follow_redirects=True) + logger.assert_called_once() + assert ("Attempt to create a source with duplicate codename" + in logger.call_args[0][0]) + assert resp.status_code == 500 + assert 'codename' not in session + assert 'logged_in' not in session + + +def test_create_duplicate_codename_logged_in_in_session(source_app): with patch.object(source.app.logger, 'error') as logger: with source_app.test_client() as app: resp = app.get(url_for('main.generate')) @@ -157,12 +181,17 @@ def test_create_duplicate_codename(source_app): assert resp.status_code == 200 # Attempt to add the same source - app.post(url_for('main.create'), follow_redirects=True) + resp = app.post(url_for('main.create'), follow_redirects=True) logger.assert_called_once() assert ("Attempt to create a source with duplicate codename" in logger.call_args[0][0]) + assert resp.status_code == 500 assert 'codename' not in session + # Reproducer for bug #4361 + resp = app.post(url_for('main.index'), follow_redirects=True) + assert 'logged_in' not in session + def test_lookup(source_app): """Test various elements on the /lookup page."""
Source Interface requests fail with 500 error, due to session issue ## Description In some situations, requests to the source interface may start to fail, returning the 500 error page. Once the 500 errors start, they continue until the Tor Browser cache is cleared, either explicitly or by starting a new browser session. With source error logging enabled, the following errors are seen on failing requests: ``` [Thu Apr 18 09:46:09.516056 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] [2019-04-18 09:46:09,510] ERROR in app: Exception on / [GET] [Thu Apr 18 09:46:09.516238 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] Traceback (most recent call last): [Thu Apr 18 09:46:09.516279 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2292, in wsgi_app [Thu Apr 18 09:46:09.516317 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] response = self.full_dispatch_request() [Thu Apr 18 09:46:09.516363 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1815, in full_dispatch_request [Thu Apr 18 09:46:09.516442 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] rv = self.handle_user_exception(e) [Thu Apr 18 09:46:09.516479 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1718, in handle_user_exception [Thu Apr 18 09:46:09.516514 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] reraise(exc_type, exc_value, tb) [Thu Apr 18 09:46:09.516549 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1811, in full_dispatch_request [Thu Apr 18 09:46:09.516584 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] rv = self.preprocess_request() [Thu Apr 18 09:46:09.516619 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2087, in preprocess_request [Thu Apr 18 09:46:09.516654 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] rv = func() [Thu Apr 18 09:46:09.516688 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/var/www/securedrop/source_app/decorators.py", line 23, in decorated_function [Thu Apr 18 09:46:09.516724 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] return f(*args, **kwargs) [Thu Apr 18 09:46:09.516758 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/var/www/securedrop/source_app/__init__.py", line 159, in setup_g [Thu Apr 18 09:46:09.516793 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] g.codename = session['codename'] [Thu Apr 18 09:46:09.516828 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/werkzeug/local.py", line 377, in <lambda> [Thu Apr 18 09:46:09.516864 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] __getitem__ = lambda x, i: x._get_current_object()[i] [Thu Apr 18 09:46:09.516899 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] File "/usr/local/lib/python2.7/dist-packages/flask/sessions.py", line 83, in __getitem__ [Thu Apr 18 09:46:09.516933 2019] [wsgi:error] [pid 7324:tid 
3457186817792] [remote 127.0.0.1:29169] return super(SecureCookieSession, self).__getitem__(key) [Thu Apr 18 09:46:09.516968 2019] [wsgi:error] [pid 7324:tid 3457186817792] [remote 127.0.0.1:29169] KeyError: 'codename' ``` ## Steps to Reproduce This error was initially hit by: 1) starting a source interface session on a 0.12.1 Xenial install 2) updating the 0.12.1 Xenial install to 0.12.2~rc1 via cron-apt 3) running a 0.12.1 db restore against the 0.12.2 database 4) attempting to continue the source session. It's also been reproduced during a test session by creating multiple sources and logging in and out repeatedly (h/t @eloquence ), but is not reliably reproducible. ## Expected Behavior Source Interface requests for valid URLs return the correct results. ## Actual Behavior SI requests all return 500 errors. ## Comments
I will try to reproduce it now. Followed the steps - starting a source interface session on a 0.12.1 Xenial install - updating the 0.12.1 Xenial install to 0.12.2~rc1 via cron-apt - running a 0.12.1 db restore against the 0.12.2 database - attempting to continue the source session. But, could not reproduce the error :( I should clarify that the testing that I did which resulted in the error was on a production instance running 0.12.1. The sequence was basically: login to source interface, submit a file, log out; rinse and repeat as new source (*without* resetting my Tor identity). I did this 10 times or so. I don't believe I took any unusual steps during the process; at one point, I was just effectively locked out in the manner Kevin describes, until I reset my identity. If what I experienced is the same issue, it is not a regression since 0.12.1. ^^ tried the above submission as new source around 30 times on `0.12.2` rc, could not reproduce it :( Need to find an easier way to reproduce this. I've removed the release QA label as we have no evidence that this is a regression introduced after 0.12.1. So far reproduction has been extremely difficult. As time allows, let's aim to understand the cause better, but per discussion yesterday, this is not a 0.12.2 blocker.
2019-04-28T02:50:38Z
[]
[]
freedomofpress/securedrop
4,416
freedomofpress__securedrop-4416
[ "4341" ]
9892d221471cb1ba82e3ec92d537823dcf225b2e
diff --git a/molecule/vagrant-packager-trusty/package.py b/molecule/vagrant-packager-trusty/package.py deleted file mode 120000 --- a/molecule/vagrant-packager-trusty/package.py +++ /dev/null @@ -1 +0,0 @@ -../vagrant-packager/package.py \ No newline at end of file diff --git a/molecule/vagrant-packager/package.py b/molecule/vagrant-packager/package.py --- a/molecule/vagrant-packager/package.py +++ b/molecule/vagrant-packager/package.py @@ -135,8 +135,6 @@ def main(): # Default to Xenial as base OS, but detect if script was invoked from the # Trusty-specific scenario, and use Trusty if so. TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "xenial") - if SCENARIO_NAME.endswith("-trusty"): - TARGET_PLATFORM = "trusty" for srv in ["app-staging", "mon-staging"]: diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -12,7 +12,6 @@ import i18n import template_filters import version -import platform from crypto_util import CryptoUtil from db import db @@ -64,12 +63,6 @@ def create_app(config): app.config['SQLALCHEMY_DATABASE_URI'] = db_uri db.init_app(app) - # Magic values for Xenial upgrade message - app.config.update( - XENIAL_WARNING_DATE=datetime.strptime('Mar 4 2019', '%b %d %Y'), - XENIAL_VER='16.04' - ) - app.storage = Storage(config.STORE_DIR, config.TEMP_DIR, config.JOURNALIST_KEY) @@ -151,10 +144,6 @@ def setup_g(): g.html_lang = i18n.locale_to_rfc_5646(g.locale) g.locales = i18n.get_locale2name() - if (platform.linux_distribution()[1] != app.config['XENIAL_VER'] and - datetime.now() >= app.config['XENIAL_WARNING_DATE']): - g.show_xenial_warning = True - if request.path.split('/')[1] == 'api': pass # We use the @token_required decorator for the API endpoints else: # We are not using the API diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -16,7 +16,7 @@ from db import db from models import Source from request_that_secures_file_uploads import RequestThatSecuresFileUploads -from source_app import main, info, api, disable +from source_app import main, info, api from source_app.decorators import ignore_static from source_app.utils import logged_in from store import Storage @@ -119,9 +119,6 @@ def setup_i18n(): g.html_lang = i18n.locale_to_rfc_5646(g.locale) g.locales = i18n.get_locale2name() - # Disables the app if the server is running Trusty past its EOL date. 
- disable.disable_app(app) - @app.before_request @ignore_static def check_tor2web(): diff --git a/securedrop/source_app/decorators.py b/securedrop/source_app/decorators.py --- a/securedrop/source_app/decorators.py +++ b/securedrop/source_app/decorators.py @@ -18,7 +18,7 @@ def ignore_static(f): a static resource.""" @wraps(f) def decorated_function(*args, **kwargs): - if request.path.startswith('/static') or request.path == '/org-logo': + if request.path.startswith('/static'): return # don't execute the decorated function return f(*args, **kwargs) return decorated_function diff --git a/securedrop/source_app/disable.py b/securedrop/source_app/disable.py deleted file mode 100644 --- a/securedrop/source_app/disable.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import date -from flask import render_template -from source_app.decorators import ignore_static -import platform - -XENIAL_VER = "16.04" -TRUSTY_DISABLE_DATE = date(2019, 4, 30) - - -def disable_app(app): - - @app.before_request - @ignore_static - def disable_ui(): - if(platform.linux_distribution()[1] != XENIAL_VER and - date.today() > TRUSTY_DISABLE_DATE): - return render_template('disabled.html')
diff --git a/docs/development/upgrade_testing.rst b/docs/development/upgrade_testing.rst --- a/docs/development/upgrade_testing.rst +++ b/docs/development/upgrade_testing.rst @@ -31,9 +31,8 @@ Upgrade testing using locally-built packages .. note:: As of ``0.12.1``, the default platform for upgrade testing - boxes is Ubuntu Xenial 16.04. For a limited time, we will also support - upgrade boxes based on Ubuntu Trusty 14.04. Substitute ``upgrade-trusty`` - for ``upgrade`` in the actions documented below to force use of Trusty. + boxes is Ubuntu Xenial 16.04. We no longer support upgrade boxes + based on Ubuntu Trusty 14.04. First, build the app code packages and create the environment: diff --git a/molecule/builder-trusty/tests/conftest.py b/molecule/builder-xenial/tests/conftest.py similarity index 90% rename from molecule/builder-trusty/tests/conftest.py rename to molecule/builder-xenial/tests/conftest.py --- a/molecule/builder-trusty/tests/conftest.py +++ b/molecule/builder-xenial/tests/conftest.py @@ -16,8 +16,7 @@ def pytest_namespace(): securedrop_test_vars = yaml.safe_load(f) # Tack on target OS for use in tests - securedrop_target_platform = os.environ.get("SECUREDROP_TARGET_PLATFORM", - "trusty") + securedrop_target_platform = os.environ.get("SECUREDROP_TARGET_PLATFORM") securedrop_test_vars["securedrop_target_platform"] = securedrop_target_platform # Wrapping the return value to accommodate for pytest namespacing return dict(securedrop_test_vars=securedrop_test_vars) diff --git a/molecule/builder-trusty/tests/test_build_dependencies.py b/molecule/builder-xenial/tests/test_build_dependencies.py similarity index 98% rename from molecule/builder-trusty/tests/test_build_dependencies.py rename to molecule/builder-xenial/tests/test_build_dependencies.py --- a/molecule/builder-trusty/tests/test_build_dependencies.py +++ b/molecule/builder-xenial/tests/test_build_dependencies.py @@ -2,7 +2,7 @@ import os -SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "trusty") +SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM") testinfra_hosts = [ "docker://{}-sd-app".format(SECUREDROP_TARGET_PLATFORM) ] diff --git a/molecule/builder-trusty/tests/test_legacy_paths.py b/molecule/builder-xenial/tests/test_legacy_paths.py similarity index 100% rename from molecule/builder-trusty/tests/test_legacy_paths.py rename to molecule/builder-xenial/tests/test_legacy_paths.py diff --git a/molecule/builder-trusty/tests/test_securedrop_deb_package.py b/molecule/builder-xenial/tests/test_securedrop_deb_package.py similarity index 99% rename from molecule/builder-trusty/tests/test_securedrop_deb_package.py rename to molecule/builder-xenial/tests/test_securedrop_deb_package.py --- a/molecule/builder-trusty/tests/test_securedrop_deb_package.py +++ b/molecule/builder-xenial/tests/test_securedrop_deb_package.py @@ -4,7 +4,7 @@ import tempfile -SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "trusty") +SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM") testinfra_hosts = [ "docker://{}-sd-dpkg-verification".format(SECUREDROP_TARGET_PLATFORM) ] diff --git a/molecule/builder-trusty/tests/test_security_updates.py b/molecule/builder-xenial/tests/test_security_updates.py similarity index 98% rename from molecule/builder-trusty/tests/test_security_updates.py rename to molecule/builder-xenial/tests/test_security_updates.py --- a/molecule/builder-trusty/tests/test_security_updates.py +++ 
b/molecule/builder-xenial/tests/test_security_updates.py @@ -1,5 +1,5 @@ import os -SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "trusty") +SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM") testinfra_hosts = [ "docker://{}-sd-sec-update".format(SECUREDROP_TARGET_PLATFORM) ] diff --git a/molecule/builder-trusty/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml similarity index 100% rename from molecule/builder-trusty/tests/vars.yml rename to molecule/builder-xenial/tests/vars.yml diff --git a/molecule/testinfra/staging/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/molecule/testinfra/staging/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -16,17 +16,6 @@ def test_apache_apt_packages(host, package): assert host.package(package).is_installed -def test_apache_apt_packages_trusty(host): - """ - Ensure required Apache packages are installed. Only checks Trusty-specific - packages; other tests handle more general apt dependencies for Apache. - """ - # Skip if testing against Xenial - if host.system_info.codename == "xenial": - return True - assert host.package("apache2-mpm-worker").is_installed - - def test_apache_security_config_deprecated(host): """ Ensure that /etc/apache2/security is absent, since it was setting diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -16,33 +16,7 @@ def test_tor_packages(Package, package): assert Package(package).is_installed -def _tor_service_running_trusty(host): - """ - Ensure tor is running and enabled. Tor is required for SSH access, - so it must be enabled to start on boot. Checks upstart/sysv-style - services, used by Trusty. - """ - # TestInfra tries determine the service manager intelligently, and - # inappropriately assumes Upstart on Trusty, due to presence of the - # `initctl` command. The tor service is handled via a SysV-style init - # script, so let's just shell out and verify the running and enabled - # states explicitly. - with host.sudo(): - assert host.check_output("service tor status") == \ - " * tor is running" - tor_enabled = host.check_output("find /etc/rc?.d -name S??tor") - - assert tor_enabled != "" - - tor_targets = tor_enabled.split("\n") - assert len(tor_targets) == 4 - for target in tor_targets: - t = host.file(target) - assert t.is_symlink - assert t.linked_to == "/etc/init.d/tor" - - -def _tor_service_running_xenial(host): +def test_tor_service_running(host): """ Ensure tor is running and enabled. Tor is required for SSH access, so it must be enabled to start on boot. Checks systemd-style services, @@ -53,20 +27,6 @@ def _tor_service_running_xenial(host): assert s.is_enabled -def test_tor_service_running(host): - """ - Ensure tor is running and enabled. Tor is required for SSH access, - so it must be enabled to start on boot. - - Calls a separate function depending on platform, to handle nuances - of upstart vs sysv init systems. 
- """ - if host.system_info.codename == "trusty": - _tor_service_running_trusty(host) - else: - _tor_service_running_xenial(host) - - @pytest.mark.parametrize('torrc_option', [ 'SocksPort 0', 'SafeLogging 1', diff --git a/molecule/testinfra/staging/common/test_platform.py b/molecule/testinfra/staging/common/test_platform.py --- a/molecule/testinfra/staging/common/test_platform.py +++ b/molecule/testinfra/staging/common/test_platform.py @@ -1,7 +1,7 @@ # We expect Ubuntu, either Trusty or Xenial, the two LTSes # currently targeted for support. -SUPPORTED_CODENAMES = ('trusty', 'xenial') -SUPPORTED_RELEASES = ('14.04', '16.04') +SUPPORTED_CODENAMES = ('xenial') +SUPPORTED_RELEASES = ('16.04') def test_ansible_version(host): diff --git a/molecule/testinfra/staging/common/test_release_upgrades.py b/molecule/testinfra/staging/common/test_release_upgrades.py --- a/molecule/testinfra/staging/common/test_release_upgrades.py +++ b/molecule/testinfra/staging/common/test_release_upgrades.py @@ -1,11 +1,10 @@ def test_release_manager_upgrade_channel(host): """ - Ensures that the `do-release-upgrade` command will honor - upgrades from Trusty to Xenial, but not suggest upgrades - from Xenial to Bionic (which is untested and unsupported.) + Ensures that the `do-release-upgrade` command will not + suggest upgrades from Xenial to Bionic (which is untested + and unsupported.) """ expected_channels = { - "trusty": "lts", "xenial": "never", } diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py --- a/molecule/testinfra/staging/common/test_system_hardening.py +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -78,13 +78,9 @@ def test_swap_disabled(host): # A leading slash will indicate full path to a swapfile. assert not re.search("^/", c, re.M) - if host.system_info.codename == "trusty": - # Expect that ONLY the headers will be present in the output. - rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority") - else: - # On Xenial, swapon 2.27.1 shows blank output, with no headers, so - # check for empty output as confirmation of no swap. - rgx = re.compile("^$") + # On Xenial, swapon 2.27.1 shows blank output, with no headers, so + # check for empty output as confirmation of no swap. 
+ rgx = re.compile("^$") assert re.search(rgx, c) diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -6,7 +6,6 @@ import random import zipfile import base64 -import datetime import binascii from base64 import b64decode @@ -1591,61 +1590,6 @@ def test_render_locales(config, journalist_app, test_journo, test_source): assert url_end + '?l=en_US' in text, text -def test_render_xenial_positive(config, journalist_app, test_journo, mocker): - yesterday = datetime.datetime.now() - datetime.timedelta(days=1) - journalist_app.config.update( - XENIAL_WARNING_DATE=yesterday, - XENIAL_VER='16.04' - ) - - mocked_error_platform = mocker.patch('platform.linux_distribution') - mocked_error_platform.return_value = ('Ubuntu', '14.04', 'trusty') - - with journalist_app.test_client() as app: - _login_user(app, test_journo['username'], test_journo['password'], - test_journo['otp_secret']) - - resp = app.get(url_for('main.index')) - - text = resp.data.decode('utf-8') - assert "critical-skull" in text, text - - -def test_render_xenial_negative_version(config, journalist_app, test_journo, mocker): - yesterday = datetime.datetime.now() - datetime.timedelta(days=1) - journalist_app.config.update( - XENIAL_WARNING_DATE=yesterday, - XENIAL_VER='16.04' - ) - - mocked_error_platform = mocker.patch('platform.linux_distribution') - mocked_error_platform.return_value = ('Ubuntu', '16.04', 'xenial') - - with journalist_app.test_client() as app: - _login_user(app, test_journo['username'], test_journo['password'], - test_journo['otp_secret']) - resp = app.get('/') - - text = resp.data.decode('utf-8') - assert "critical-skull" not in text, text - - -def test_render_xenial_negative_date(config, journalist_app, test_journo): - tomorrow = datetime.datetime.now() + datetime.timedelta(days=1) - journalist_app.config.update( - XENIAL_WARNING_DATE=tomorrow, - XENIAL_VER='16.04' - ) - - with journalist_app.test_client() as app: - _login_user(app, test_journo['username'], test_journo['password'], - test_journo['otp_secret']) - resp = app.get('/') - - text = resp.data.decode('utf-8') - assert "critical-skull" not in text, text - - def test_download_selected_submissions_from_source(journalist_app, test_journo, test_source): diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import gzip -import platform import re import subprocess import six @@ -14,21 +13,14 @@ from . 
import utils import version -from datetime import date from db import db from models import Source, Reply from source_app import main as source_app_main from source_app import api as source_app_api -from source_app import disable as source_app_disable from .utils.db_helper import new_codename from .utils.instrument import InstrumentedApp overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1) -TRUSTY_DISABLED_ENDPOINTS = ['main.index', 'main.lookup', 'main.generate', 'main.login', - 'info.download_journalist_pubkey', 'info.tor2web_warning', - 'info.recommend_tor_browser', 'info.why_download_journalist_pubkey'] -STATIC_ASSETS = ['css/source.css', 'i/custom_logo.png', 'i/font-awesome/fa-globe-black.png', - 'i/favicon.png'] def test_page_not_found(source_app): @@ -731,53 +723,3 @@ def test_source_can_only_delete_own_replies(source_app): reply = Reply.query.filter_by(filename=filename).one() assert reply.deleted_by_source - - -def test_source_disabled_when_trusty_is_eol(config, source_app): - with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: - mocked_platform.return_value = ("Ubuntu", "14.04", "Trusty") - - with source_app.test_client() as app: - source_app_disable.TRUSTY_DISABLE_DATE = date(2001, 1, 1) - - assert platform.linux_distribution()[1] == "14.04" - for endpoint in TRUSTY_DISABLED_ENDPOINTS: - resp = app.get(url_for(endpoint)) - assert resp.status_code == 200 - text = resp.data.decode('utf-8') - assert "We're sorry, our SecureDrop is currently offline." in text - # Ensure static assets are properly served - for asset in STATIC_ASSETS: - resp = app.get(url_for('static', filename=asset)) - assert resp.status_code == 200 - text = resp.data.decode('utf-8') - assert "We're sorry, our SecureDrop is currently offline." not in text - - -def test_source_not_disabled_before_trusty_eol(config, source_app): - with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: - mocked_platform.return_value = ("Ubuntu", "14.04", "Trusty") - - with source_app.test_client() as app: - source_app_disable.TRUSTY_DISABLE_DATE = date(2097, 1, 1) - assert platform.linux_distribution()[1] == "14.04" - for endpoint in TRUSTY_DISABLED_ENDPOINTS: - resp = app.get(url_for(endpoint), follow_redirects=True) - assert resp.status_code == 200 - text = resp.data.decode('utf-8') - assert "We're sorry, our SecureDrop is currently offline." not in text - - -def test_source_not_disabled_xenial(config, source_app): - with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: - mocked_platform.return_value = ("Ubuntu", "16.04", "Xenial") - - with source_app.test_client() as app: - source_app_disable.TRUSTY_DISABLE_DATE = date(2001, 1, 1) - - assert platform.linux_distribution()[1] == "16.04" - for endpoint in TRUSTY_DISABLED_ENDPOINTS: - resp = app.get(url_for(endpoint), follow_redirects=True) - assert resp.status_code == 200 - text = resp.data.decode('utf-8') - assert "We're sorry, our SecureDrop is currently offline." not in text
Remove Trusty-specific logic from application code and dev env ## Description Some platform-specific logic was added to handle the upgrade from Trusty to Xenial and should be removed: - Source Interface Submission disable logic: https://github.com/freedomofpress/securedrop/pull/4325 - Admin Interface alert for end-of-life OS: https://github.com/freedomofpress/securedrop/pull/4055 There is also some Trusty-specific logic in the development environment: - [Docker images](https://github.com/freedomofpress/securedrop/tree/develop/securedrop/dockerfiles/trusty) - [CI targets](https://github.com/freedomofpress/securedrop/blob/develop/.circleci/config.yml#L59) - The use of `BASE_OS` in Makefile targets - Upgrade testing scenarios and boxes Since Trusty will no longer be supported after 0.12.2, we should remove this logic to improve the maintainability of the codebase and reduce the risk of errors.
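For context, the logic being removed gates behavior on the detected Ubuntu release and a cut-off date. A minimal sketch of the pattern, mirroring the deleted `source_app/disable.py` shown in the patch above (`platform.linux_distribution()` is a Python 2-era API, removed in Python 3.8):

```python
import platform
from datetime import date

XENIAL_VER = "16.04"
TRUSTY_DISABLE_DATE = date(2019, 4, 30)


def should_disable_ui():
    # True only on a non-Xenial host after the Trusty EOL cut-off date.
    release = platform.linux_distribution()[1]
    return release != XENIAL_VER and date.today() > TRUSTY_DISABLE_DATE
```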
2019-05-08T16:53:14Z
[]
[]
freedomofpress/securedrop
4,422
freedomofpress__securedrop-4422
[ "4341" ]
00813de70a2b9b3b172b7cf036e1c372e283d708
diff --git a/molecule/vagrant-packager/package.py b/molecule/vagrant-packager/package.py --- a/molecule/vagrant-packager/package.py +++ b/molecule/vagrant-packager/package.py @@ -14,9 +14,6 @@ import xml.etree.ElementTree as ET -# Current script is symlinked into adjacent scenario, for Trusty compatibility. -# Look up "name" for scenario from real path (relative to symlink), but store -# all artifacts in primary scenario (via realpath). SCENARIO_NAME = os.path.basename(os.path.dirname(os.path.abspath(__file__))) SCENARIO_PATH = os.path.dirname(os.path.realpath(__file__)) BOX_BUILD_DIR = join(SCENARIO_PATH, "build") @@ -132,8 +129,7 @@ def main(): with open(TARGET_VERSION_FILE, 'r') as f: TARGET_VERSION = f.read().strip() - # Default to Xenial as base OS, but detect if script was invoked from the - # Trusty-specific scenario, and use Trusty if so. + # Default to Xenial as base OS. TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "xenial") for srv in ["app-staging", "mon-staging"]:
diff --git a/molecule/testinfra/staging/app/test_apparmor.py b/molecule/testinfra/staging/app/test_apparmor.py --- a/molecule/testinfra/staging/app/test_apparmor.py +++ b/molecule/testinfra/staging/app/test_apparmor.py @@ -107,7 +107,7 @@ def test_apparmor_total_profiles(host): with host.sudo(): total_expected = str(len(sdvars.apparmor_enforce) + len(sdvars.apparmor_complain)) - # Trusty has ~10, Xenial about ~20 profiles, so let's expect + # Xenial about ~20 profiles, so let's expect # *at least* the sum. assert host.check_output("aa-status --profiled") >= total_expected @@ -116,7 +116,7 @@ def test_aastatus_unconfined(host): """ Ensure that there are no processes that are unconfined but have a profile """ - # Trusty and Xenial should show 0 unconfined processes. + # There should be 0 unconfined processes. expected_unconfined = 0 unconfined_chk = str("{} processes are unconfined but have" diff --git a/molecule/testinfra/staging/common/test_platform.py b/molecule/testinfra/staging/common/test_platform.py --- a/molecule/testinfra/staging/common/test_platform.py +++ b/molecule/testinfra/staging/common/test_platform.py @@ -1,5 +1,4 @@ -# We expect Ubuntu, either Trusty or Xenial, the two LTSes -# currently targeted for support. +# We expect Ubuntu Xenial SUPPORTED_CODENAMES = ('xenial') SUPPORTED_RELEASES = ('16.04') @@ -19,10 +18,7 @@ def test_ansible_version(host): def test_platform(host): """ - SecureDrop requires Ubuntu Trusty 14.04 LTS. The shelf life - of that release means we'll need to migrate to Xenial LTS - at some point; until then, require hosts to be running - Ubuntu. + SecureDrop requires Ubuntu Ubuntu 16.04 LTS. """ assert host.system_info.type == "linux" assert host.system_info.distribution == "ubuntu"
Remove Trusty-specific logic from application code and dev env ## Description Some platform-specific logic was added to handle the upgrade from Trusty to Xenial and should be removed: - Source Interface Submission disable logic: https://github.com/freedomofpress/securedrop/pull/4325 - Admin Interface alert for end-of-life OS: https://github.com/freedomofpress/securedrop/pull/4055 There is also some Trusty-specific logic in the development environment: - [Docker images](https://github.com/freedomofpress/securedrop/tree/develop/securedrop/dockerfiles/trusty) - [CI targets](https://github.com/freedomofpress/securedrop/blob/develop/.circleci/config.yml#L59) - The use of `BASE_OS` in Makefile targets - Upgrade testing scenarios and boxes Since Trusty will no longer be supported after 0.12.2, we should remove this logic to improve the maintainability of the codebase and reduce the risk of errors.
2019-05-09T13:40:23Z
[]
[]
freedomofpress/securedrop
4,425
freedomofpress__securedrop-4425
[ "4251" ]
82eed078adf3fa464c1a8036779df78e4009c4e1
diff --git a/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py b/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py @@ -0,0 +1,28 @@ +"""Migrations for SecureDrop's 0.14.0 release + +Revision ID: a9fe328b053a +Revises: b58139cfdc8c +Create Date: 2019-05-21 20:23:30.005632 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'a9fe328b053a' +down_revision = 'b58139cfdc8c' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.add_column(sa.Column('first_name', sa.String(length=255), nullable=True)) + batch_op.add_column(sa.Column('last_name', sa.String(length=255), nullable=True)) + + +def downgrade(): + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.drop_column('last_name') + batch_op.drop_column('first_name') diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py --- a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py @@ -1,9 +1,7 @@ """add checksum columns and revoke token table - Revision ID: b58139cfdc8c Revises: f2833ac34bb6 Create Date: 2019-04-02 10:45:05.178481 - """ import os from alembic import op diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py --- a/securedrop/journalist_app/account.py +++ b/securedrop/journalist_app/account.py @@ -5,8 +5,8 @@ from flask_babel import gettext from db import db -from journalist_app.utils import (make_password, set_diceware_password, - validate_user, validate_hotp_secret) +from journalist_app.utils import (make_password, set_diceware_password, set_name, validate_user, + validate_hotp_secret) def make_blueprint(config): @@ -18,6 +18,13 @@ def edit(): return render_template('edit_account.html', password=password) + @view.route('/change-name', methods=('POST',)) + def change_name(): + first_name = request.form.get('first_name') + last_name = request.form.get('last_name') + set_name(g.user, first_name, last_name) + return redirect(url_for('account.edit')) + @view.route('/new-password', methods=('POST',)) def new_password(): user = g.user diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -9,10 +9,10 @@ from sqlalchemy.orm.exc import NoResultFound from db import db -from models import Journalist, InvalidUsernameException, PasswordError +from models import Journalist, InvalidUsernameException, FirstOrLastNameError, PasswordError from journalist_app.decorators import admin_required -from journalist_app.utils import (make_password, commit_account_changes, - set_diceware_password, validate_hotp_secret) +from journalist_app.utils import (make_password, commit_account_changes, set_diceware_password, + validate_hotp_secret) from journalist_app.forms import LogoForm, NewUserForm @@ -54,6 +54,8 @@ def add_user(): if form.validate_on_submit(): form_valid = True username = request.form['username'] + first_name = request.form['first_name'] + last_name = request.form['last_name'] password = request.form['password'] is_admin = bool(request.form.get('is_admin')) @@ -63,6 +65,8 
@@ def add_user(): otp_secret = request.form.get('otp_secret', '') new_user = Journalist(username=username, password=password, + first_name=first_name, + last_name=last_name, is_admin=is_admin, otp_secret=otp_secret) db.session.add(new_user) @@ -172,6 +176,22 @@ def edit_user(user_id): else: user.username = new_username + try: + first_name = request.form['first_name'] + Journalist.check_name_acceptable(first_name) + user.first_name = first_name + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") + return redirect(url_for("admin.edit_user", user_id=user_id)) + + try: + last_name = request.form['last_name'] + Journalist.check_name_acceptable(last_name) + user.last_name = last_name + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") + return redirect(url_for("admin.edit_user", user_id=user_id)) + user.is_admin = bool(request.form.get('is_admin')) commit_account_changes(user) diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py --- a/securedrop/journalist_app/forms.py +++ b/securedrop/journalist_app/forms.py @@ -30,11 +30,20 @@ def minimum_length_validation(form, field): num_chars=len(field.data)))) +def name_length_validation(form, field): + if len(field.data) > Journalist.MAX_NAME_LEN: + raise ValidationError(gettext( + 'Field can not be more than {max_chars} characters.' + .format(max_chars=Journalist.MAX_NAME_LEN))) + + class NewUserForm(FlaskForm): username = TextField('username', validators=[ InputRequired(message=gettext('This field is required.')), minimum_length_validation ]) + first_name = TextField('first_name', validators=[name_length_validation, Optional()]) + last_name = TextField('last_name', validators=[name_length_validation, Optional()]) password = HiddenField('password') is_admin = BooleanField('is_admin') is_hotp = BooleanField('is_hotp') diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -10,10 +10,9 @@ import i18n from db import db -from models import (get_one_or_else, Source, Journalist, - InvalidUsernameException, WrongPasswordException, - LoginThrottledException, BadTokenException, SourceStar, - PasswordError, Submission, RevokedToken) +from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, + WrongPasswordException, FirstOrLastNameError, LoginThrottledException, + BadTokenException, SourceStar, PasswordError, Submission, RevokedToken) from rm import srm from store import add_checksum_for_file from worker import rq_worker_queue @@ -273,6 +272,15 @@ def delete_collection(filesystem_id): return job +def set_name(user, first_name, last_name): + try: + user.set_name(first_name, last_name) + db.session.commit() + flash(gettext('Name updated.'), "success") + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") + + def set_diceware_password(user, password): try: user.set_password(password) diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -26,7 +26,7 @@ import journalist_app from db import db -from models import Source, Journalist, PasswordError, InvalidUsernameException +from models import Source, Journalist, PasswordError, InvalidUsernameException, FirstOrLastNameError from management.run import run logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s') @@ -125,6 +125,30 @@ def _get_username(): 
return username +def _get_first_name(): + while True: + first_name = obtain_input('First name: ') + if not first_name: + return None + try: + Journalist.check_name_acceptable(first_name) + return first_name + except FirstOrLastNameError as e: + print('Invalid name: ' + str(e)) + + +def _get_last_name(): + while True: + last_name = obtain_input('Last name: ') + if not last_name: + return None + try: + Journalist.check_name_acceptable(last_name) + return last_name + except FirstOrLastNameError as e: + print('Invalid name: ' + str(e)) + + def _get_yubikey_usage(): '''Function used to allow for test suite mocking''' while True: @@ -151,6 +175,8 @@ def _make_password(): def _add_user(is_admin=False): with app_context(): username = _get_username() + first_name = _get_first_name() + last_name = _get_last_name() print("Note: Passwords are now autogenerated.") password = _make_password() @@ -175,6 +201,8 @@ def _add_user(is_admin=False): try: user = Journalist(username=username, + first_name=first_name, + last_name=last_name, password=password, is_admin=is_admin, otp_secret=otp_secret) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -291,6 +291,24 @@ class InvalidUsernameException(Exception): """Raised when a user logs in with an invalid username""" +class FirstOrLastNameError(Exception): + """Generic error for names that are invalid.""" + + def __init__(self, msg): + msg = 'Invalid first or last name.' + super(FirstOrLastNameError, self).__init__(msg) + + +class InvalidNameLength(FirstOrLastNameError): + """Raised when attempting to create a Journalist with an invalid name length.""" + + def __init__(self, name): + self.name_len = len(name) + if self.name_len > Journalist.MAX_NAME_LEN: + msg = "Name too long (len={})".format(self.name_len) + super(InvalidNameLength, self).__init__(msg) + + class LoginThrottledException(Exception): """Raised when a user attempts to log in @@ -341,6 +359,8 @@ class Journalist(db.Model): id = Column(Integer, primary_key=True) uuid = Column(String(36), unique=True, nullable=False) username = Column(String(255), nullable=False, unique=True) + first_name = Column(String(255)) + last_name = Column(String(255)) pw_salt = Column(Binary(32)) pw_hash = Column(Binary(256)) is_admin = Column(Boolean) @@ -358,10 +378,19 @@ class Journalist(db.Model): backref="journalist") MIN_USERNAME_LEN = 3 + MIN_NAME_LEN = 0 + MAX_NAME_LEN = 100 - def __init__(self, username, password, is_admin=False, otp_secret=None): + def __init__(self, username, password, first_name=None, last_name=None, is_admin=False, + otp_secret=None): self.check_username_acceptable(username) self.username = username + if first_name: + self.check_name_acceptable(first_name) + self.first_name = first_name + if last_name: + self.check_name_acceptable(last_name) + self.last_name = last_name self.set_password(password) self.is_admin = is_admin self.uuid = str(uuid.uuid4()) @@ -400,6 +429,14 @@ def set_password(self, passphrase): self.passphrase_hash = argon2.using(**ARGON2_PARAMS).hash(passphrase) + def set_name(self, first_name, last_name): + if first_name: + self.check_name_acceptable(first_name) + if last_name: + self.check_name_acceptable(last_name) + self.first_name = first_name + self.last_name = last_name + @classmethod def check_username_acceptable(cls, username): if len(username) < cls.MIN_USERNAME_LEN: @@ -407,6 +444,12 @@ def check_username_acceptable(cls, username): 'Username "{}" must be at least {} characters long.' 
.format(username, cls.MIN_USERNAME_LEN)) + @classmethod + def check_name_acceptable(cls, name): + # Enforce a reasonable maximum length for names + if len(name) > cls.MAX_NAME_LEN: + raise InvalidNameLength(name) + @classmethod def check_password_acceptable(cls, password): # Enforce a reasonable maximum length for passwords to avoid DoS diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py --- a/securedrop/qa_loader.py +++ b/securedrop/qa_loader.py @@ -79,10 +79,9 @@ def new_journalist(self): pw = ' '.join( [random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) for _ in range(7)]) - journalist = Journalist(random_chars(random.randint(3, 32), - nullable=False), - pw, - random_bool()) + journalist = Journalist(username=random_chars(random.randint(3, 32), nullable=False), + password=pw, + is_admin=random_bool()) if random_bool(): # to add legacy passwords back in journalist.passphrase_hash = None
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -284,10 +284,18 @@ def updated_image(): # giving extra time for upload to complete self.wait_for(updated_image, timeout=self.timeout * 6) - def _add_user(self, username, is_admin=False, hotp=None): + def _add_user(self, username, first_name="", last_name="", is_admin=False, hotp=None): username_field = self.driver.find_element_by_css_selector('input[name="username"]') username_field.send_keys(username) + if first_name: + first_name_field = self.driver.find_element_by_id("first_name") + first_name_field.send_keys(first_name) + + if last_name: + last_name_field = self.driver.find_element_by_id("last_name") + last_name_field.send_keys(last_name) + if hotp: hotp_checkbox = self.driver.find_element_by_css_selector('input[name="is_hotp"]') hotp_checkbox.click() @@ -316,8 +324,11 @@ def _admin_adds_a_user(self, is_admin=False, new_username=""): if not new_username: new_username = next(journalist_usernames) - self.new_user = dict(username=new_username, password=password) - self._add_user(self.new_user["username"], is_admin=is_admin) + self.new_user = dict(username=new_username, first_name='', last_name='', password=password) + self._add_user(self.new_user["username"], + first_name=self.new_user['first_name'], + last_name=self.new_user['last_name'], + is_admin=is_admin) if not hasattr(self, "accept_languages"): # Clicking submit on the add user form should redirect to @@ -469,7 +480,7 @@ def edit_user_page_loaded(): # There's a field to change the user's username and it's already filled # out with the user's username. username_field = self.driver.find_element_by_css_selector("#username") - assert username_field.get_attribute("placeholder") == username + assert username_field.get_attribute("value") == username # There's a checkbox to change the admin status of the user and # it's already checked appropriately to reflect the current status of # our user. 
@@ -516,10 +527,11 @@ def can_edit_user(): self.wait_for(can_edit_user) - new_username = self.new_user["username"] + "2" + new_characters = "2" + new_username = self.new_user["username"] + new_characters username_field = self.driver.find_element_by_css_selector('input[name="username"]') - username_field.send_keys(new_username) + username_field.send_keys(new_characters) update_user_btn = self.driver.find_element_by_css_selector("button[type=submit]") update_user_btn.click() @@ -756,8 +768,15 @@ def _admin_visits_reset_2fa_totp(self): def _admin_creates_a_user(self, hotp): self.safe_click_by_id("add-user") self.wait_for(lambda: self.driver.find_element_by_id("username")) - self.new_user = dict(username="dellsberg", password="pentagonpapers") - self._add_user(self.new_user["username"], is_admin=False, hotp=hotp) + self.new_user = dict(username="dellsberg", + first_name='', + last_name='', + password="pentagonpapers") + self._add_user(self.new_user["username"], + first_name=self.new_user['first_name'], + last_name=self.new_user['last_name'], + is_admin=False, + hotp=hotp) def _journalist_delete_all(self): for checkbox in self.driver.find_elements_by_name("doc_names_selected"): diff --git a/securedrop/tests/migrations/helpers.py b/securedrop/tests/migrations/helpers.py --- a/securedrop/tests/migrations/helpers.py +++ b/securedrop/tests/migrations/helpers.py @@ -22,6 +22,11 @@ def random_bytes(min, max, nullable): return random_chars(random.randint(min, max)) +def random_name(): + len = random.randint(1, 100) + return random_chars(len) + + def random_username(): len = random.randint(3, 64) return random_chars(len) diff --git a/securedrop/tests/migrations/migration_a9fe328b053a.py b/securedrop/tests/migrations/migration_a9fe328b053a.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_a9fe328b053a.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +import random +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_chars + +random.seed('⎦˚◡˚⎣') + + +class Helper: + + def __init__(self): + self.journalist_id = None + + def create_journalist(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid.uuid4()), + 'username': random_chars(50), + } + sql = '''INSERT INTO journalists (uuid, username) + VALUES (:uuid, :username) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + def create_journalist_after_migration(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid.uuid4()), + 'username': random_chars(50), + 'first_name': random_chars(50), + 'last_name': random_chars(50) + } + sql = ''' + INSERT INTO journalists (uuid, username, first_name, last_name) + VALUES (:uuid, :username, :first_name, :last_name) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + +class UpgradeTester(Helper): + + def __init__(self, config): + Helper.__init__(self) + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + self.create_journalist() + + def check_upgrade(self): + ''' + - Verify that Journalist first and last names are present after upgrade. 
+ ''' + with self.app.app_context(): + journalists_sql = "SELECT * FROM journalists" + journalists = db.engine.execute(text(journalists_sql)).fetchall() + for journalist in journalists: + assert journalist['first_name'] is None + assert journalist['last_name'] is None + + +class DowngradeTester(Helper): + + def __init__(self, config): + Helper.__init__(self) + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + self.create_journalist_after_migration() + + def check_downgrade(self): + ''' + - Verify that Journalist first and last names are gone after downgrade. + ''' + with self.app.app_context(): + journalists_sql = "SELECT * FROM journalists" + journalists = db.engine.execute(text(journalists_sql)).fetchall() + for journalist in journalists: + try: + assert journalist['first_name'] + except NoSuchColumnError: + pass + try: + assert journalist['last_name'] + except NoSuchColumnError: + pass diff --git a/securedrop/tests/migrations/migration_b58139cfdc8c.py b/securedrop/tests/migrations/migration_b58139cfdc8c.py --- a/securedrop/tests/migrations/migration_b58139cfdc8c.py +++ b/securedrop/tests/migrations/migration_b58139cfdc8c.py @@ -151,7 +151,6 @@ def check_upgrade(self): and without being able to inject this config, the checksum function won't succeed. The above `load_data` function provides data that can be manually verified by checking the `rqworker` log file in `/tmp/`. - The other part of the migration, creating a table, cannot be tested regardless. ''' pass @@ -183,7 +182,6 @@ def load_data(self): def check_downgrade(self): ''' Verify that the checksum column is now gone. - The dropping of the revoked_tokens table cannot be checked. If the migration completes, then it wokred correctly. 
''' diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -557,13 +557,13 @@ def test_user_edits_password_error_reponse(journalist_app, test_journo): models.LOGIN_HARDENING = original_hardening -def test_admin_add_user_when_username_already_taken(journalist_app, - test_admin): +def test_admin_add_user_when_username_already_taken(journalist_app, test_admin): with journalist_app.test_client() as app: - _login_user(app, test_admin['username'], test_admin['password'], - test_admin['otp_secret']) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) resp = app.post(url_for('admin.add_user'), data=dict(username=test_admin['username'], + first_name='', + last_name='', password=VALID_PASSWORD, is_admin=None)) text = resp.data.decode('utf-8') @@ -606,6 +606,8 @@ def test_admin_edits_user_password_too_long_warning(journalist_app, app.post( url_for('admin.new_password', user_id=test_journo['id']), data=dict(username=test_journo['username'], + first_name='', + last_name='', is_admin=None, password=overly_long_password), follow_redirects=True) @@ -626,6 +628,8 @@ def test_user_edits_password_too_long_warning(journalist_app, test_journo): app.post( url_for('account.new_password'), data=dict(username=test_journo['username'], + first_name='', + last_name='', is_admin=None, token=TOTP(test_journo['otp_secret']).now(), current_password=test_journo['password'], @@ -647,6 +651,8 @@ def test_admin_add_user_password_too_long_warning(journalist_app, test_admin): app.post( url_for('admin.add_user'), data=dict(username='dellsberg', + first_name='', + last_name='', password=overly_long_password, is_admin=None)) @@ -655,6 +661,34 @@ def test_admin_add_user_password_too_long_warning(journalist_app, test_admin): 'created. 
Please try again.', 'error') +def test_admin_add_user_first_name_too_long_warning(journalist_app, test_admin): + with journalist_app.test_client() as app: + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + resp = app.post(url_for('admin.add_user'), + data=dict(username=test_admin['username'], + first_name=overly_long_name, + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + text = resp.data.decode('utf-8') + assert 'Field can not be more than' in text + + +def test_admin_add_user_last_name_too_long_warning(journalist_app, test_admin): + with journalist_app.test_client() as app: + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + resp = app.post(url_for('admin.add_user'), + data=dict(username=test_admin['username'], + first_name='', + last_name=overly_long_name, + password=VALID_PASSWORD, + is_admin=None)) + text = resp.data.decode('utf-8') + assert 'Field can not be more than' in text + + def test_admin_edits_user_invalid_username( journalist_app, test_admin, test_journo): """Test expected error message when admin attempts to change a user's @@ -667,7 +701,10 @@ def test_admin_edits_user_invalid_username( with InstrumentedApp(journalist_app) as ins: app.post( url_for('admin.edit_user', user_id=test_admin['id']), - data=dict(username=new_username, is_admin=None)) + data=dict(username=new_username, + first_name='', + last_name='', + is_admin=None)) ins.assert_message_flashed( 'Username "{}" already taken.'.format(new_username), @@ -957,8 +994,7 @@ def test_http_get_on_admin_new_user_two_factor_page( with journalist_app.test_client() as app: _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) - resp = app.get( - url_for('admin.new_user_two_factor', uid=test_journo['id'])) + resp = app.get(url_for('admin.new_user_two_factor', uid=test_journo['id'])) # any GET req should take a user to the admin.new_user_two_factor page assert 'FreeOTP' in resp.data.decode('utf-8') @@ -976,12 +1012,13 @@ def test_admin_add_user(journalist_app, test_admin): username = 'dellsberg' with journalist_app.test_client() as app: - _login_user(app, test_admin['username'], test_admin['password'], - test_admin['otp_secret']) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) with InstrumentedApp(journalist_app) as ins: resp = app.post(url_for('admin.add_user'), data=dict(username=username, + first_name='', + last_name='', password=VALID_PASSWORD, is_admin=None)) @@ -1015,9 +1052,8 @@ def test_admin_add_user_too_short_username(journalist_app, test_admin): password='pentagonpapers', password_again='pentagonpapers', is_admin=None)) - assert ('Field must be at least {} characters long'.format( - Journalist.MIN_USERNAME_LEN) in - resp.data.decode('utf-8')) + msg = 'Field must be at least {} characters long' + assert (msg.format(Journalist.MIN_USERNAME_LEN) in resp.data.decode('utf-8')) def test_admin_add_user_yubikey_odd_length(journalist_app, test_admin): @@ -1027,6 +1063,8 @@ def test_admin_add_user_yubikey_odd_length(journalist_app, test_admin): resp = app.post(url_for('admin.add_user'), data=dict(username='dellsberg', + first_name='', + last_name='', password=VALID_PASSWORD, password_again=VALID_PASSWORD, is_admin=None, @@ -1044,6 +1082,8 @@ def test_admin_add_user_yubikey_valid_length(journalist_app, test_admin): resp = 
app.post(url_for('admin.add_user'), data=dict(username='dellsberg', + first_name='', + last_name='', password=VALID_PASSWORD, password_again=VALID_PASSWORD, is_admin=None, @@ -1066,6 +1106,8 @@ def test_admin_add_user_yubikey_correct_length_with_whitespace( resp = app.post(url_for('admin.add_user'), data=dict(username='dellsberg', + first_name='', + last_name='', password=VALID_PASSWORD, password_again=VALID_PASSWORD, is_admin=None, @@ -1081,11 +1123,12 @@ def test_admin_sets_user_to_admin(journalist_app, test_admin): new_user = 'admin-set-user-to-admin-test' with journalist_app.test_client() as app: - _login_user(app, test_admin['username'], test_admin['password'], - test_admin['otp_secret']) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) resp = app.post(url_for('admin.add_user'), data=dict(username=new_user, + first_name='', + last_name='', password=VALID_PASSWORD, is_admin=None)) assert resp.status_code in (200, 302) @@ -1095,7 +1138,7 @@ def test_admin_sets_user_to_admin(journalist_app, test_admin): assert journo.is_admin is False resp = app.post(url_for('admin.edit_user', user_id=journo.id), - data=dict(is_admin=True)) + data=dict(first_name='', last_name='', is_admin=True)) assert resp.status_code in (200, 302) journo = Journalist.query.filter_by(username=new_user).one() @@ -1111,6 +1154,8 @@ def test_admin_renames_user(journalist_app, test_admin): resp = app.post(url_for('admin.add_user'), data=dict(username=new_user, + first_name='', + last_name='', password=VALID_PASSWORD, is_admin=None)) assert resp.status_code in (200, 302) @@ -1118,7 +1163,9 @@ def test_admin_renames_user(journalist_app, test_admin): new_user = new_user + 'a' resp = app.post(url_for('admin.edit_user', user_id=journo.id), - data=dict(username=new_user)) + data=dict(username=new_user, + first_name='', + last_name='')) assert resp.status_code in (200, 302), resp.data.decode('utf-8') # the following will throw an exception if new_user is not found @@ -1126,9 +1173,62 @@ def test_admin_renames_user(journalist_app, test_admin): Journalist.query.filter(Journalist.username == new_user).one() +def test_admin_adds_first_name_last_name_to_user(journalist_app, test_admin): + new_user = 'admin-first-name-last-name-user-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + assert resp.status_code in (200, 302) + journo = Journalist.query.filter(Journalist.username == new_user).one() + + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(username=new_user, + first_name='test name', + last_name='test name')) + assert resp.status_code in (200, 302) + + # the following will throw an exception if new_user is not found + # therefore asserting it has been created + Journalist.query.filter(Journalist.username == new_user).one() + + +def test_admin_adds_invalid_first_last_name_to_user(journalist_app, test_admin): + new_user = 'admin-invalid-first-name-last-name-user-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + assert resp.status_code in (200, 302) + journo = 
Journalist.query.filter(Journalist.username == new_user).one() + + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(username=overly_long_name, + first_name=overly_long_name, + last_name='test name'), + follow_redirects=True) + assert resp.status_code in (200, 302) + text = resp.data.decode('utf-8') + assert 'Name not updated' in text + + def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): - mocked_error_logger = mocker.patch( - 'journalist_app.admin.current_app.logger.error') + mocked_error_logger = mocker.patch('journalist_app.admin.current_app.logger.error') mocker.patch('journalist_app.admin.Journalist', side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None)) @@ -1139,6 +1239,8 @@ def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): with InstrumentedApp(journalist_app) as ins: app.post(url_for('admin.add_user'), data=dict(username='username', + first_name='', + last_name='', password=VALID_PASSWORD, is_admin=None)) ins.assert_message_flashed( @@ -1283,7 +1385,8 @@ def test_user_authorization_for_posts(journalist_app): url_for('main.bulk'), url_for('account.new_two_factor'), url_for('account.reset_two_factor_totp'), - url_for('account.reset_two_factor_hotp')] + url_for('account.reset_two_factor_hotp'), + url_for('account.change_name')] with journalist_app.test_client() as app: for url in urls: resp = app.post(url) @@ -1401,6 +1504,33 @@ def test_valid_user_password_change(journalist_app, test_journo): assert 'Password updated.' in resp.data.decode('utf-8') +def test_valid_user_first_last_name_change(journalist_app, test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + resp = app.post(url_for('account.change_name'), + data=dict(first_name='test', + last_name='test'), + follow_redirects=True) + + assert 'Name updated.' 
in resp.data.decode('utf-8') + + +def test_valid_user_invalid_first_last_name_change(journalist_app, test_journo): + with journalist_app.test_client() as app: + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + resp = app.post(url_for('account.change_name'), + data=dict(first_name=overly_long_name, + last_name=overly_long_name), + follow_redirects=True) + + assert 'Name not updated' in resp.data.decode('utf-8') + + def test_regenerate_totp(journalist_app, test_journo): old_secret = test_journo['otp_secret'] @@ -1579,7 +1709,7 @@ def test_render_locales(config, journalist_app, test_journo, test_source): # we need the relative URL, not the full url including proto / localhost url_end = url.replace('http://', '') - url_end = url_end[url_end.index('/')+1:] + url_end = url_end[url_end.index('/') + 1:] with app.test_client() as app: _login_user(app, test_journo['username'], test_journo['password'], @@ -1696,14 +1826,11 @@ def test_download_unread_all_sources(journalist_app, test_journo): # All the not dowloaded submissions are in the zipfile for submission in bulk['not_downloaded0']: - zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo( - os.path.join( - "unread", - bulk['source0'].journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - bulk['source0'].last_updated.date()), - submission.filename - )) + zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo(os.path.join( + "unread", + bulk['source0'].journalist_designation, + "%s_%s" % (submission.filename.split('-')[0], bulk['source0'].last_updated.date()), + submission.filename)) assert zipinfo for submission in bulk['not_downloaded1']: diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py --- a/securedrop/tests/test_manage.py +++ b/securedrop/tests/test_manage.py @@ -65,6 +65,8 @@ def test_handle_invalid_secret(journalist_app, config, mocker, capsys): """Regression test for bad secret logic in manage.py""" mocker.patch("manage._get_username", return_value='ntoll'), + mocker.patch("manage._get_first_name", return_value=''), + mocker.patch("manage._get_last_name", return_value=''), mocker.patch("manage._get_yubikey_usage", return_value=True), mocker.patch("manage.obtain_input", side_effect=YUBIKEY_HOTP), @@ -92,6 +94,8 @@ def test_exception_handling_when_duplicate_username(journalist_app, """Regression test for duplicate username logic in manage.py""" mocker.patch("manage._get_username", return_value='foo-bar-baz') + mocker.patch("manage._get_first_name", return_value='') + mocker.patch("manage._get_last_name", return_value='') mocker.patch("manage._get_yubikey_usage", return_value=False) original_config = manage.config @@ -119,6 +123,8 @@ def test_exception_handling_when_duplicate_username(journalist_app, # Note: we use the `journalist_app` fixture because it creates the DB def test_delete_user(journalist_app, config, mocker): mocker.patch("manage._get_username", return_value='test-user-56789') + mocker.patch("manage._get_first_name", return_value='') + mocker.patch("manage._get_last_name", return_value='') mocker.patch("manage._get_yubikey_usage", return_value=False) mocker.patch("manage._get_username_to_delete", return_value='test-user-56789') @@ -194,6 +200,16 @@ def test_get_username(mocker): assert manage._get_username() == 'foo-bar-baz' +def test_get_first_name(mocker): + mocker.patch("manage.obtain_input", return_value='foo-bar-baz') + assert manage._get_first_name() == 'foo-bar-baz' + + 
+def test_get_last_name(mocker): + mocker.patch("manage.obtain_input", return_value='foo-bar-baz') + assert manage._get_last_name() == 'foo-bar-baz' + + def test_clean_tmp_do_nothing(caplog): args = argparse.Namespace(days=0, directory=' UNLIKELY::::::::::::::::: ', diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -17,7 +17,7 @@ # models.{Journalist, Reply} -def init_journalist(is_admin=False): +def init_journalist(first_name=None, last_name=None, is_admin=False): """Initialize a journalist into the database. Return their :class:`models.Journalist` object and password string. @@ -29,7 +29,11 @@ def init_journalist(is_admin=False): """ username = current_app.crypto_util.genrandomid() user_pw = current_app.crypto_util.genrandomid() - user = models.Journalist(username, user_pw, is_admin) + user = models.Journalist(username=username, + password=user_pw, + first_name=first_name, + last_name=last_name, + is_admin=is_admin) db.session.add(user) db.session.commit() return user, user_pw
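For readers skimming the patch above, the heart of the change is a small validation layer for optional names on the `Journalist` model. Below is a condensed, runnable sketch of that pattern; the exception and method names mirror the diff, but the class here is a simplified stand-in, not the actual SecureDrop model.

```python
# Simplified stand-in for the validation pattern in the patch above.
# Names are optional and validated only when provided.


class FirstOrLastNameError(Exception):
    """Generic error for names that are invalid."""


class InvalidNameLength(FirstOrLastNameError):
    """Raised when a first or last name exceeds the length limit."""

    def __init__(self, name):
        super(InvalidNameLength, self).__init__(
            "Name too long (len={})".format(len(name)))


class Journalist(object):
    MAX_NAME_LEN = 100

    def __init__(self, username):
        self.username = username
        self.first_name = None
        self.last_name = None

    @classmethod
    def check_name_acceptable(cls, name):
        # Enforce a reasonable maximum length for names.
        if len(name) > cls.MAX_NAME_LEN:
            raise InvalidNameLength(name)

    def set_name(self, first_name, last_name):
        # Validate only the values that were actually supplied.
        if first_name:
            self.check_name_acceptable(first_name)
        if last_name:
            self.check_name_acceptable(last_name)
        self.first_name = first_name
        self.last_name = last_name


if __name__ == "__main__":
    j = Journalist("dellsberg")
    j.set_name("Daniel", "Ellsberg")
    print(j.first_name, j.last_name)
```

Keeping the names optional, and validated only when present, matches the requirement in the problem statement below that FN/LN fields not be required.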
Add Identity text fields in Admin interface

## Description

In the Qubes Workstation Client, journalists will have first-initial/last-initial square "ID Badges" to represent themselves and each other in message threads and task logs (example just below). To support this, the existing Admin interface needs fields that can populate those ID badges with data. Screenshots of the most obvious solution to this are below. Per the conversation thread below this comment, alternative options should ideally be considered and tested prior to an implementation.

- FN/LN fields should not be required, as they'll only deliver value in the Workstation Client.
- Both name fields should be longer than the existing username field, to accommodate lengthy English hyphenated names and lengthy first and last names in non-Western alphabets/cultures.
- When onboarding a newsroom to the Workstation Client product, it should be made clear that adding FN/LN entries for all journalists (even if just as single initials) is recommended.
- In the attached mockups, a few subtle tweaks were also made to the list of users in the Admin interface. Sorry, I had to. Only done to facilitate legibility with existing assets and type styles:
  - Left-align all fields.
  - Increase line spacing (or top/bottom row/table padding).
  - Add a pale grey horizontal line between entries. This will help make the list more readable with multiple empty FN/LN entries.
  - Reduce the type size of the timestamp entries and make them grey. Another legibility tweak.
  - Move the "Edit" column to be the first on the left, and the "Delete" column to be the last on the right.

## User Research Evidence

The UX kid just says to (and the Workstation Client will surface collaboration info much less easily if this capability is not added). "LL" for Lois Lane and "SY" for Steve Yzerman is an existing messaging-app and email-app paradigm that has been validated in testing as having low cognitive friction, while also being easy to implement (so long as the columns in data tables already exist, hence this issue).

## User Stories

As the Admin for the Daily Planet, I want to give my SecureDrop journalist users first names and last names. This will help surface cross-team activity clearly, and in a fashion that's easy to comprehend, across the super sweet new Qubes Workstation Client. Because the Qubes Workstation is SO awesome and all our newsroom's journalists want to use it, I'd also like the list of all users tidied up a touch so that it's more legible beyond 3-4 line items (a little UX bird hinted that'd help).

As a Journalist using the Qubes Workstation Client, I want to be able to see my own activity reflected in an easy-to-recognize fashion. Likewise, I want to be able to see the contributions and activities of my colleagues in a similarly easy-to-recognize fashion.

![image](https://user-images.githubusercontent.com/8262612/54054293-a7f4fe80-419e-11e9-9672-d696a11470e2.png)

![image](https://user-images.githubusercontent.com/8262612/54053622-ae827680-419c-11e9-941f-3adfdce8cad7.png)
Workstation mockups (with various design elements in flux) showing examples of the aforementioned ID Badges in use, to surface activity 411...

![image](https://user-images.githubusercontent.com/8262612/54054502-2f427200-419f-11e9-9099-6d9c919e3db7.png)

![image](https://user-images.githubusercontent.com/8262612/54054593-7d577580-419f-11e9-8b11-0b0961bd133a.png)

Am curious, though, if users in more adversarial orgs/countries may feel vulnerable being asked for this info, like it could put them at risk existing in a system, versus a username that's less easy to pin on them. Def something I'd like to see folks asked about in forthcoming testing/interview sessions!

What do you propose as a fallback when no real name is configured (feel free to link to relevant issue)?

@eloquence for the Workstation? The first two letters of the username is the most no-brainer thing that comes to mind.

If my speculation around the importance of maintaining plausible deniability is legit (or just because we should, because we're so security/risk-focused?), an alternative approach to FN/LN fields would be "How would you like to be called?" as one text field, and then as the second one showing an ID badge and requesting 2 characters be entered for a team to identify that person. Such a screen would require supportive text to properly guide admins from newsrooms like Bloomberg and Meduza (on opposite sides of a risk spectrum), and everyone in between. Happy to mock up the latter at some point. Not wanting to get too distracted from immediate tasks with this atm, tho.

I'm increasingly liking the idea of the latter, tbh. Optimally, we could offer BOTH methods as options in the Config panel, so the less risk-averse (and more culturally mainstream) newsrooms could get what they're comfortable with, and the more adversarial newsrooms could get something to meet their needs.

...we haven't yet created salutation text in the Workstation Client UX ("Hello, Erik!") so the "How would you like to be called?" option may not be necessary. Would be fun to do testing with various options of this, tbh!

> for the Workstation? The first two letters of the username is the most no-brainer thing that comes to mind.

That makes sense to me. Should the username `dellsberg` become `DE`, `de`, or `De`? I would say `de` since it most closely approximates the actual username and therefore is most likely to be familiar. (The username is case-sensitive.)

Regarding the security question: Right now, from the source's point of view, replies are not attributed. We've discussed potentially changing this, but no such change is currently on the backlog. Provided that sources continue to see replies without attribution for now, I think it's unlikely that this type of optional attribution _to other journalists_ poses a security risk. If/when we cross the line to reply attribution (from the source's point of view) we have to be very clear about a) what the default behavior should be, and b) how that reply attribution is communicated to the journalist. Does that make sense?

I edited the main comment in the Client Epic Issue to have the username initialization for now be `DE`, but I had not thought of the angle you offer, which frankly is quite a judo-move observation! I woulda never thought of that, hooray collaboration! Will now edit the Client Epic Issue to reflect `de` as the styling of choice for `dellsberg`. :)

Reply attributions that the Source sees: Yeah, I'd had that filed in the back of my head as a ways-out can of worms yet to touch.

In-app attributions that other Journalists see: So... thinking back to my own work with a far less legal-risk-involved art group, even in our own internal stuff we never used both our first and last names, or initials, together. Should a Workstation laptop fall into the wrong hands, or should a State actor be able to hack into an SD instance, journalists would then be "exposed" by name as indictable leak facilitators to target. That may be unreasonably extreme to anticipate, but it could be a back-of-mind concern for journalists (and/or sources) in especially adversarial newsrooms. Thoughts?

Since it's optional, I think the main question is how we communicate a) the fact that it's optional, and b) the fact that only other journalists/admins see it, not sources. On the page where usernames are provisioned, we may want to provide some additional hints to that effect, so that administrators can make an informed decision about whether or not to add a real name when provisioning user accounts. That said, I'm adding the "security" label and tagging @emkll, as any change that relates to a news organization's threat model should get his input.

@eloquence I just remembered last night why initials must be capitalized in ID Badges rather than lowercase: capital letters all sit on a consistent rectilinear visual grid, while lowercase letters such as "p" and "f" have "ascenders" and "descenders" that cause visual wonkiness. Additionally, letters with descenders simply have nowhere to fit in a compact space. I personally prefer the legibility benefits of lowercase; however, for automation and usability, I'm afraid they gotta be uppercase. I did a whole study on how this all works when I worked on ID badges at Yahoo!, a million years ago (ok, 13, but in tech that's almost a million?). I appreciate your suggestion, though. Does this make sense?

I don't see any security issues with doing this if the response attribution is Journalist-facing only and provided that it's opt-in, since we definitely don't want to surprise existing users (Journalists) with this new behavior. The username + real name solution described above (or a similar username + initials scheme, where initials are opt-in) seems like a sound approach to me: using a new field will ensure it's completely opt-in.
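Pulling the thread above together into code: a minimal sketch of the badge-derivation fallback discussed here, assuming a hypothetical client-side `badge_initials` helper. It uses first/last initials when both optional names are set, and otherwise falls back to the first two letters of the username, uppercased per the grid/legibility point above.

```python
def badge_initials(username, first_name=None, last_name=None):
    """Hypothetical helper: derive a two-letter ID badge.

    Uses first/last initials when both optional names are set,
    and falls back to the first two letters of the username.
    Uppercased, per the legibility discussion above.
    """
    if first_name and last_name:
        return (first_name[0] + last_name[0]).upper()
    return username[:2].upper()


assert badge_initials("dellsberg") == "DE"
assert badge_initials("dellsberg", "Daniel", "Ellsberg") == "DE"
assert badge_initials("lois", "Lois", "Lane") == "LL"
```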
2019-05-09T19:01:49Z
[]
[]
freedomofpress/securedrop
4,436
freedomofpress__securedrop-4436
[ "3911" ]
ff00f4c718e2f5e4428e5706215fa50e03915204
diff --git a/securedrop/models.py b/securedrop/models.py
--- a/securedrop/models.py
+++ b/securedrop/models.py
@@ -101,6 +101,18 @@ def collection(self):
         collection.sort(key=lambda x: int(x.filename.split('-')[0]))
         return collection
 
+    @property
+    def fingerprint(self):
+        return current_app.crypto_util.getkey(self.filesystem_id)
+
+    @fingerprint.setter
+    def fingerprint(self, value):
+        raise NotImplementedError
+
+    @fingerprint.deleter
+    def fingerprint(self):
+        raise NotImplementedError
+
     @property
     def public_key(self):
         return current_app.crypto_util.export_pubkey(self.filesystem_id)
@@ -136,7 +148,8 @@ def to_json(self):
             'interaction_count': self.interaction_count,
             'key': {
                 'type': 'PGP',
-                'public': self.public_key
+                'public': self.public_key,
+                'fingerprint': self.fingerprint
             },
             'number_of_documents': docs_msg_count['documents'],
             'number_of_messages': docs_msg_count['messages'],
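Note how the patch pairs the new property with a setter and deleter that raise `NotImplementedError`, making `fingerprint` a derived, read-only attribute. In isolation the pattern looks like the following generic sketch; the computation is a stand-in, since the real property delegates to the app's crypto utility.

```python
class Source(object):
    """Generic sketch of the read-only derived-property pattern above."""

    def __init__(self, filesystem_id):
        self.filesystem_id = filesystem_id

    @property
    def fingerprint(self):
        # Derived on access rather than stored.
        return "FP-" + self.filesystem_id  # stand-in computation

    @fingerprint.setter
    def fingerprint(self, value):
        # Guard against accidental writes to a derived value.
        raise NotImplementedError

    @fingerprint.deleter
    def fingerprint(self):
        raise NotImplementedError
```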
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py
--- a/securedrop/tests/test_journalist_api.py
+++ b/securedrop/tests/test_journalist_api.py
@@ -277,6 +277,8 @@ def test_authorized_user_gets_single_source(journalist_app, test_source,
         assert response.status_code == 200
 
         assert response.json['uuid'] == test_source['source'].uuid
+        assert response.json['key']['fingerprint'] == \
+            test_source['source'].fingerprint
         assert 'BEGIN PGP PUBLIC KEY' in response.json['key']['public']
 
 
Include `fingerprint` in the `key` object in a `Source` response

## Description

To easily manipulate and reference PGP keys using GPG or GPG-wrappers, we need to know the key's fingerprint. It may make sense for the server to calculate this for us and send it as part of the response.
We should implement this as part of resolving https://github.com/freedomofpress/securedrop-client/issues/363, because the existing client-side logic that parses stdout to get the key fingerprint needs modification when we use the Qubes RPC service for importing keys. This is a symptom of the fact that parsing stdout for key fingerprints is brittle. We should just expose the fingerprint via the API.
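For illustration, a client-side sketch of what exposing the fingerprint enables, assuming a local dev instance, a token obtained from `/api/v1/token`, and that the list endpoint wraps results in a `sources` array (the single-source shape is confirmed by the test above; the list envelope is an assumption here):

```python
import requests

API = "http://localhost:8081/api/v1"  # assumption: local dev instance
token = "..."                         # assumption: obtained via /api/v1/token

resp = requests.get(API + "/sources",
                    headers={"Authorization": "Token " + token})
resp.raise_for_status()

for source in resp.json()["sources"]:
    key = source["key"]
    # With the fingerprint exposed directly, no stdout parsing is
    # needed to reference the reply key in GPG or GPG wrappers.
    print(source["uuid"], key["fingerprint"])
```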
2019-05-14T01:12:56Z
[]
[]
freedomofpress/securedrop
4,467
freedomofpress__securedrop-4467
[ "4267" ]
81dd5aadc6489a08404dd2b26140fba174bd7c73
diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py
--- a/securedrop/source_app/api.py
+++ b/securedrop/source_app/api.py
@@ -11,10 +11,12 @@ def make_blueprint(config):
 
     @view.route('/metadata')
     def metadata():
-        meta = {'gpg_fpr': config.JOURNALIST_KEY,
-                'sd_version': version.__version__,
-                'server_os': platform.linux_distribution()[1],
-                }
+        meta = {
+            'gpg_fpr': config.JOURNALIST_KEY,
+            'sd_version': version.__version__,
+            'server_os': platform.linux_distribution()[1],
+            'supported_languages': config.SUPPORTED_LOCALES
+        }
         resp = make_response(json.dumps(meta))
         resp.headers['Content-Type'] = 'application/json'
         return resp
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -545,7 +545,7 @@ def test_why_journalist_key(source_app):
         assert "Why download the journalist's public key?" in text
 
 
-def test_metadata_route(source_app):
+def test_metadata_route(config, source_app):
     with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform:
         mocked_platform.return_value = ("Ubuntu", "16.04", "xenial")
         with source_app.test_client() as app:
@@ -554,6 +554,8 @@ def test_metadata_route(config, source_app):
             assert resp.headers.get('Content-Type') == 'application/json'
             assert resp.json.get('sd_version') == version.__version__
             assert resp.json.get('server_os') == '16.04'
+            assert resp.json.get('supported_languages') ==\
+                config.SUPPORTED_LOCALES
 
 
 def test_login_with_overly_long_codename(source_app):
Add list of supported languages to the metadata API

Whether or not a SecureDrop is available in one of the [supported languages](https://docs.securedrop.org/en/latest/admin.html#configuring-localization-for-the-source-interface-and-the-journalist-interface) is public information enumerated at the bottom of the source interface, but it's not currently exposed in the metadata API. Returning the list of supported languages along with the other instance metadata would be useful, including for the envisioned source interface scanner integrated with securedrop.org.

# User Stories

As a translator, I want to know which languages are currently used by SecureDrop users, so I know if and where my translations have real world impact.

As a SecureDrop support team member, I want to know at a glance whether news organizations have configured supported languages, so I can point out to them if/when translations relevant to them are available.

As a SecureDrop.org visitor, I'd like to know if a SecureDrop instance is available in the language I speak, so that I know if I can navigate it with confidence, and that it is likely to accept submissions in my language.
hey @camfassett - are you interested in picking up a SecureDrop issue? This is a good first issue. The place you'd want to add the supported languages is [this API endpoint](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/source_app/api.py#L12). You'll want to expose the value of `config.SUPPORTED_LOCALES` there. You should get something like:

```
{"supported_languages": ["ar", "de_DE", "es_ES", "en_US", "el", "fr_FR", "it_IT", "nb_NO", "nl", "pt_BR", "tr", "zh_Hant"], "server_os": "16.04", "sd_version": "0.12.1", "gpg_fpr": "65A1B5FF195B56353CC63DFFCC40EF1228271441"}
```

To check that the endpoint is working as expected, you'd also want to add asserts to the existing `/metadata` unit test in [this file](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/tests/test_source.py#L527), or write a new unit test.

Thanks, @redshiftzero! I won't have time to look at this until next week, but I'd like to work on this issue.

Hey @eloquence, I would like to work on this issue. Let me know if I can pick this up?
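For completeness, a small sketch of the scanner-style check from the user stories, assuming direct HTTP access to a dev instance (a real scanner would route requests through Tor for onion services; the URL is a placeholder):

```python
import json
import urllib.request

# Fetch the public /metadata endpoint and report the configured locales.
url = "http://localhost:8080/metadata"  # placeholder dev-instance URL

with urllib.request.urlopen(url) as resp:
    meta = json.load(resp)

langs = meta.get("supported_languages", [])
print("SecureDrop", meta.get("sd_version"), "speaks:", ", ".join(langs))
if "fr_FR" in langs:
    print("French-speaking sources can navigate this instance.")
```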
2019-05-26T14:57:31Z
[]
[]
freedomofpress/securedrop
4,486
freedomofpress__securedrop-4486
[ "4439" ]
7ab33ffa821788c5bce35d1d8992ef64e01ebcc3
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -57,9 +57,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '0.12.2'
+version = '0.13.0'
 # The full version, including alpha/beta/rc tags.
-release = '0.12.2'
+release = '0.13.0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.13.0~rc1'
+__version__ = '0.13.0'
diff --git a/molecule/builder-xenial/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml
--- a/molecule/builder-xenial/tests/vars.yml
+++ b/molecule/builder-xenial/tests/vars.yml
@@ -1,5 +1,5 @@
 ---
-securedrop_version: "0.13.0~rc1"
+securedrop_version: "0.13.0"
 ossec_version: "3.0.0"
 keyring_version: "0.1.2"
 config_version: "0.1.3"
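One detail worth calling out in the version-bump diffs: Debian's version ordering treats `~` as sorting before everything, including the empty string, so `0.13.0~rc1` is older than `0.13.0` and the rc packages upgrade cleanly to the final release. A quick sanity check of the ordering, assuming python-apt is available:

```python
import apt_pkg

apt_pkg.init_system()

# A negative result means the first version is older, so the rc deb
# will be superseded by the final 0.13.0 package.
assert apt_pkg.version_compare('0.13.0~rc1', '0.13.0') < 0
print("0.13.0~rc1 < 0.13.0 per Debian version ordering")
```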
Release SecureDrop 0.13.0

This is a tracking issue for the upcoming release of SecureDrop 0.13.0 - tasks may get added or modified.

**Feature freeze:** EOD Pacific time, Wednesday, May 15
**String freeze:** Wednesday, May 15
**Pre-release announcement:** Wednesday, May 22
**Release date:** Wednesday, May 29

**Release manager:** @zenmonkeykstop
**Deputy release manager:** @emkll

_SecureDrop maintainers and testers:_ As you QA 0.13.0, please report back your testing results as comments on this ticket. File GitHub issues for any problems found, tag them "QA: Release", and associate them with the 0.13.0 milestone for tracking (or ask a maintainer to do so).

Test debian packages will be posted on https://apt-test.freedom.press signed with [the test key](https://gist.githubusercontent.com/conorsch/ec4008b111bc3142fca522693f3cce7e/raw/2968621e8ad92db4505a31fcc5776422d7d26729/apt-test%2520apt%2520pubkey). An Ansible playbook testing the upgrade path is [here](https://gist.github.com/conorsch/e7556624df59b2a0f8b81f7c0c4f9b7d).

# Prepare release candidate (0.13.0~rc1)
- [x] Prepare 0.13.0-rc1 release changelog - @zenmonkeykstop
- [x] Branch off 0.13.0~rc1 into release/0.13.0 - @zenmonkeykstop
- [x] Build debs and put up `0.13.0~rc1` on test apt server - @emkll

# [QA Matrix for 0.13.0](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0)

# [Test Plan for 0.13.0](https://github.com/freedomofpress/securedrop/wiki/0.13.0-Test-Plan)

After each test, please update the QA matrix and post details for Basic Server Testing, Application Acceptance Testing and 0.13.0-specific testing below in comments to this ticket.

# Final release
- [x] Ensure builder in release branch is updated and/or update builder image - @emkll
- [x] Merge final translations - @kushaldas, @rmol
- [x] Push signed tag -
- [x] Build final Debian packages for 0.13.0 - @conorsch
- [x] Upload Debian packages to apt test - @conorsch
- [x] Pre-Flight: Test install and upgrade (both cron-apt on Trusty, and Ansible on Xenial) of 0.13.0 works w/ prod repo debs, test updater logic in Tails
- [x] Prepare and distribute pre-release messaging - @eloquence
- [x] Prepare and distribute release messaging - @eloquence

# Post release
- [x] Merge changelog back to `develop` - @zenmonkeykstop via #4488
- [x] Bump version on `develop` in prep for 0.14.0 release - @zenmonkeykstop via #4488
- [x] Run translation tests nightly or weekly - @redshiftzero via https://github.com/freedomofpress/securedrop/pull/4489
- [x] Run deb-tests nightly or weekly - @redshiftzero via https://github.com/freedomofpress/securedrop/pull/4489
- [x] Update upgrade testing boxes - @conorsch
- [x] Update roadmap wiki page - @eloquence
Draft pre-release messaging here: https://docs.google.com/document/d/1oDqDRs7hOV-fnRWMXVxxQdt8IBUvvX_PCbAunU8a7Is/edit# ## Clean install of 0.13.0 on NUC7 ### Environment - Install target: NUC7i5BNH - Tails version: 3.13.1 - Test Scenario: installation of 0.13.0 on Xenial - SSH over Tor: only as needed for specific tests - Release candidate: 0.13.0~rc1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [ ] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). 
Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. - [x] When a user attempts to log in twice on the source interface, the user is logged out and a 500 error is not returned, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [ ] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Servers sqlite database and comparing values. - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [x] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the Etag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the sha256 hash of the encrypted submission. (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). # QA plan - NUC5s - NUC7s - Mac Minis - 1U servers in SF ## 0.13.0 QA Checklist For both upgrades and fresh installs, here is a list of functionality that requires testing. You can use this for copy/pasting into your QA report. Feel free to edit this message to update the plan as appropriate. 
If you have submitted a QA report already for a 0.13.0 release candidate with successful basic server testing and application acceptance testing sections, then you can skip these sections in subsequent reports, unless otherwise indicated by the Release Manager. This is to ensure that you focus your QA effort on the 0.13.0-specific changes as well as changes since the previous release candidate. **Note:** if you are testing an upgrade scenario, you should create several sources and submit a large number of files of varying sizes up to the 500MB limit before performing the upgrade. A large dataset is required in order to test the long-running database migration task implemented in [#4134](https://github.com/freedomofpress/securedrop/pull/4314). ### Environment - Install target: Mac Mini - Tails version: 3.13.2 - Test Scenario: fresh install - SSH over Tor: yes - Release candidate: 0.13.0-rc1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [x] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 **n/a** ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### 
Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue **had to revert unstaged changes first** ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. - [x] When a user attempts to log in twice on the source interface, the user is logged out, a single 500 error is returned, and the user can then log in, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [x] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. **n/a - clean install** - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [x] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the sha256 hash of the encrypted submission. **couldn't test - sha256sum takes ~2s for a 400MB file** (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). 
Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). ### Preflight - [ ] Ensure the builder image is up-to-date on release day These tests should be performed the day of release prior to live debian packages on apt.freedom.press #### Basic testing - [ ] Install or upgrade occurs without error - [ ] Source interface is available and version string indicates it is 0.13.0 - [ ] A message can be successfully submitted #### Tails - [ ] The updater GUI appears on boot - [ ] The update successfully occurs to 0.13.0 - [ ] After reboot, updater GUI no longer appears # QA plan - NUC5s - NUC7s - Mac Minis - 1U servers in SF ## 0.13.0 QA Checklist For both upgrades and fresh installs, here is a list of functionality that requires testing. You can use this for copy/pasting into your QA report. Feel free to edit this message to update the plan as appropriate. If you have submitted a QA report already for a 0.13.0 release candidate with successful basic server testing and application acceptance testing sections, then you can skip these sections in subsequent reports, unless otherwise indicated by the Release Manager. This is to ensure that you focus your QA effort on the 0.13.0-specific changes as well as changes since the previous release candidate. **Note:** if you are testing an upgrade scenario, you should create several sources and submit a large number of files of varying sizes up to the 500MB limit before performing the upgrade. A large dataset is required in order to test the long-running database migration task implemented in [#4314](https://github.com/freedomofpress/securedrop/pull/4314).
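For the large upload set mentioned in the note above, a quick way to produce files of assorted sizes is sketched below (a minimal Python sketch; the filenames and sizes are illustrative, not part of the checklist):

```python
# Minimal sketch: generate random files of assorted sizes for upload QA.
import os

for size_mb in (1, 50, 100, 450):
    with open('qa-upload-{}mb.bin'.format(size_mb), 'wb') as f:
        for _ in range(size_mb):
            f.write(os.urandom(1024 * 1024))  # write 1 MiB at a time
```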
### Environment - Install target: Mac Mini - Tails version: 3.14 - Test Scenario: upgrade from 0.12.2 - SSH over Tor: yes - Release candidate: RC1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [ ] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 **skipped due to tester error** ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. 
- [x] When a user attempts to log in twice on the source interface, the user is logged out and a 500 error is not returned, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [x] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. **ran cron-apt, rebooted, checked after a few mins - spot-checked values correct** - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [x] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the sha256 hash of the encrypted submission. (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). 
- **FAIL without playbook run, no changes in Apache config, passes otherwise** ### Preflight - [ ] Ensure the builder image is up-to-date on release day These tests should be performed the day of release prior to live debian packages on apt.freedom.press #### Basic testing - [ ] Install or upgrade occurs without error - [ ] Source interface is available and version string indicates it is 0.13.0 - [ ] A message can be successfully submitted #### Tails - [ ] The updater GUI appears on boot - [ ] The update successfully occurs to 0.13.0 - [ ] After reboot, updater GUI no longer appears ## Upgrade from 0.12.2 on NUC7 ### Environment - Install target: NUC7i5BNH - Tails version: 3.14 - Test Scenario: cron-apt upgrade of 0.12.2 to 0.13.0 - SSH over Tor: only as needed for specific tests - Release candidate: 0.13.0~rc1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [x] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running 
`securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. - [x] When a user attempts to log in twice on the source interface, the user is logged out and a 500 error is not returned, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [x] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [x] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the sha256 hash of the encrypted submission. (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). 
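A quick way to spot-check the ETag items in these reports is to hash the downloaded bytes and compare them to the header. The sketch below is illustrative only: `api_url` and `token` are placeholders, requests would need to be routed over Tor against a real instance, and the exact ETag formatting may differ:

```python
# Hedged sketch: compare a download's ETag header to the sha256 of its body.
# `api_url` and `token` are placeholders, not real values.
import hashlib
import requests

resp = requests.get(api_url, headers={'Authorization': 'Token ' + token})
digest = hashlib.sha256(resp.content).hexdigest()
assert digest in resp.headers['ETag'], (resp.headers['ETag'], digest)
```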
## Upgrade from 0.12.2 on NUC5PYH (Completed) ### Environment - Install target: NUC5 - Tails version: 3.14 - Test Scenario: 0.12.2 -> 0.13.0 - SSH over Tor: Yes - Release candidate: RC1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [x] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI (DID NOT TEST) After updating to this release candidate and running `securedrop-admin tailsconfig` - [ ] The Updater GUI appears on boot - [ ] Updating occurs without issue ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. 
- [x] When a user attempts to log in twice on the source interface, a single 500 error is returned and the user is logged out, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [x] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. * NOTE: :man_facepalming: forgot to run the migration before upgrading. I did have 2 files in the database, and they were hashed successfully. - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [ ] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the sha256 hash of the encrypted submission. (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). * requires an ansible run to update Apache config * Unfortunately the error message is not great, because the error handlers on the application side do not return 405 like other methods (for obvious reasons) # QA plan - NUC5s - NUC7s - Mac Minis - 1U servers in SF ## 0.13.0 QA Checklist For both upgrades and fresh installs, here is a list of functionality that requires testing. You can use this for copy/pasting into your QA report. Feel free to edit this message to update the plan as appropriate. 
If you have submitted a QA report already for a 0.13.0 release candidate with successful basic server testing and application acceptance testing sections, then you can skip these sections in subsequent reports, unless otherwise indicated by the Release Manager. This is to ensure that you focus your QA effort on the 0.13.0-specific changes as well as changes since the previous release candidate. **Note:** if you are testing an upgrade scenario, you should create several sources and submit a large number of files of varying sizes up to the 500MB limit before performing the upgrade. A large dataset is required in order to test the long-running database migration task implemented in [#4314](https://github.com/freedomofpress/securedrop/pull/4314). ### Environment - Install target: SF 1U servers - Tails version: 3.14 - Test Scenario: upgrade - SSH over Tor: yes - Release candidate: RC1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install **no access to email but alerts triggered** - [ ] OSSEC emails are decrypted to correct key and I am able to decrypt them **no access to check** - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [ ] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 **not tested - servers were updated to 0.13.0~rc1 automatically** ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source 
and docs are deleted - [ ] You can click on a document and successfully decrypt using application private key **don't have SVS key to check** ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 0.13.0-specific changes - [x] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. - [x] When a user attempts to log in twice on the source interface, the user is logged out and a 500 error is not returned, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [x] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [x] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. - [x] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [x] Immediately after a large submission has been uploaded, *before* the `checksum` field has been updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by setting the `checksum` field to `null` for a submission in the database, then comparing the ETag value of a response to the sha256 hash of the encrypted submission. (API-specific): - [x] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [x] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [x] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). 
Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [x] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). ### Preflight - [ ] Ensure the builder image is up-to-date on release day These tests should be performed the day of release prior to live debian packages on apt.freedom.press #### Basic testing - [ ] Install or upgrade occurs without error - [ ] Source interface is available and version string indicates it is 0.13.0 - [ ] A message can be successfully submitted #### Tails - [ ] The updater GUI appears on boot - [ ] The update successfully occurs to 0.13.0 - [ ] After reboot, updater GUI no longer appears ## Clean install in VMs (In progress) ### Environment - Install target: Xenial Vagrant VMs - Tails version: 3.14 - Test Scenario: Clean install - SSH over Tor: Yes - Release candidate: RC1 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [ ] [QA Matrix](https://docs.google.com/spreadsheets/d/1SFGDrsMAw6X-I4v6ZC7ZkmHl3eS9peh5xfuP-JtPOB8/edit#gid=0) checks pass #### Command Line User Generation - [ ] Can successfully add admin user and login #### Administration - [ ] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [ ] If doing upgrade testing, make a backup on 0.12.2 and restore this backup on 0.13.0 - [ ] "Send Test OSSEC Alert" button in the Journalist Interface triggers an OSSEC alert and an email is sent. 
### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [ ] JS warning bar does not appear when using Security Slider high - [ ] JS warning bar does appear when using Security Slider Low ([#4211](https://github.com/freedomofpress/securedrop/pull/4211)) ##### First submission base cases - [ ] On generate page, refreshing codename produces a new 7-word codename - [ ] On submit page, empty submissions produce flashed message - [ ] On submit page, short message submitted successfully - [ ] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [ ] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [ ] Nonexistent codename cannot log in - [ ] Empty codename cannot log in - [ ] Legitimate codename can log in - [ ] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [ ] Can log in with 2FA tokens - [ ] incorrect password cannot log in - [ ] invalid 2fa token cannot log in - [ ] 2fa immediate reuse cannot log in ##### Index base cases - [ ] Filter by codename works - [ ] Starring and unstarring works - [ ] Click select all selects all submissions - [ ] Selecting all and clicking "Download" works ##### Individual source page - [ ] You can submit a reply and a flashed message and new row appears - [ ] You cannot submit an empty reply - [ ] Clicking "Delete Source And Submissions" and the source and docs are deleted - [ ] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [ ] The Updater GUI appears on boot - [ ] Updating occurs without issue ### 0.13.0-specific changes - [ ] The message to sources that have been flagged for reply has been updated as implemented in [#4428](https://github.com/freedomofpress/securedrop/pull/4428). Verify this by changing line 201 in `/var/www/securedrop/source_app/main.py` from `if entropy_avail >= 2400:` to `if entropy_avail >= 2400 and False:`, restarting Apache, creating a source, flagging the source in the Journalist Interface, and logging back in as the source. - [ ] When a user attempts to log in twice on the source interface, a single 500 error is returned and the user is logged out, as per [#4391](https://github.com/freedomofpress/securedrop/pull/4391). Verify this by opening a browser tab and visiting `/generate`, then opening a second tab and logging in or creating a new codename via `/create`, then returning to the first tab and attempting to log in. - [ ] The sha256 checksum of a given encrypted submission or reply is returned as the ETag header value in API download responses, as per [#4314](https://github.com/freedomofpress/securedrop/pull/4314): - [ ] **Upgrade Only:** BEFORE upgrade, run the QA loader (https://docs.securedrop.org/en/release-0.12.2/development/database_migrations.html#release-testing-migrations) and also upload a few _very_ large files (~100 MB+). Following the postinst migration task, the `checksum` field in the `submissions` and `replies` tables is populated with the sha256 hash of the corresponding files. Verify this by checking the Application Server's sqlite database and comparing values. 
- [ ] After a submission has been uploaded and the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by comparing the ETag value of a response to the value in the database - [ ] After a submission has been uploaded, but before the `checksum` field has been asynchronously updated, the ETag header value in the response to an API request for a file contains the sha256 hash of the file. Verify this by setting the `checksum` field of a submission to `null` in the database, then downloading via the API and comparing the ETag value of a response to the value in the database after the download. (API-specific): - [ ] API responses to `/api/v1/sources` include the GPG fingerprint for sources' reply keys in their respective `key` objects, as implemented in [#4436](https://github.com/freedomofpress/securedrop/pull/4436). - [ ] Calling `/api/v1/logout` invalidates the user's current authorization token, causing subsequent calls using the same token to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). - [ ] After the time set in `TOKEN_EXPIRATION_MINS` in `securedrop/journalist_app/api.py`, an unused authorization token is invalidated, causing subsequent calls to fail, as implemented in [#4349](https://github.com/freedomofpress/securedrop/pull/4349). Verify this by setting `TOKEN_EXPIRATION_MINS` to a low value, restarting Apache, creating an auth token, and waiting for the timeout before using it in a call - [ ] API calls using the HTTP DELETE method to the `/api/v1/sources/$UUID` endpoint successfully delete the source with uuid `$UUID`, along with all related files, as implemented in [#4023](https://github.com/freedomofpress/securedrop/pull/4023). Draft release messaging here: https://docs.google.com/document/d/1C9KaHslvGxk_3bjV0jgqj-MUQ6t6EVpJEWUaTNPGzZk/edit# Compared with pre-release messaging, mainly tense changes, pointer to workstation upgrade docs, and the new acknowledgments section.
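For the database-side checks above (inspecting `checksum` values, or clearing one to exercise the on-the-fly hashing path), something along these lines can be run on the Application Server; the database path shown is the usual SecureDrop location, but verify it on your instance:

```python
# Hedged sketch: inspect checksum values and clear one to force re-hashing.
# Run on the Application Server; the db path is assumed, verify locally.
import sqlite3

conn = sqlite3.connect('/var/lib/securedrop/db.sqlite')
for filename, checksum in conn.execute('SELECT filename, checksum FROM submissions'):
    print(filename, checksum)

# Clear one checksum so the next API download must hash the file itself.
conn.execute('UPDATE submissions SET checksum = NULL WHERE id = ?', (1,))
conn.commit()
```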
2019-05-29T21:54:10Z
[]
[]
freedomofpress/securedrop
4,487
freedomofpress__securedrop-4487
[ "4410" ]
c0c0d47e68fb083a75a67d5d973aa16ef7366a8e
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -299,3 +299,14 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False + +# -- Options for linkcheck -- + +linkcheck_retries = 3 + +linkcheck_ignore = [ + r'http://127.0.0.1(:\d+)?/?', + r'http://localhost(:\d+)?/?', + 'https://forum.securedrop.org/admin/users/list/active', + 'https://weblate.securedrop.org/projects/securedrop/securedrop/#repository', +]
diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst --- a/docs/development/testing_continuous_integration.rst +++ b/docs/development/testing_continuous_integration.rst @@ -62,7 +62,7 @@ output as shown below: This message shows that your installation appears to be working correctly. ... -.. _Docker installation: https://www.docker.com/community-edition#/download +.. _Docker installation: https://docs.docker.com/install/ Setup Environment Parameters ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Check documentation links as part of docs linting ## Description Sphinx [linkcheck](https://www.sphinx-doc.org/en/master/usage/builders/index.html#sphinx.builders.linkcheck.CheckExternalLinksBuilder) allows the verification of links with the `requests` library to ensure that the links are still valid and active. It might be useful to run this regularly or as part of CI to catch dead or broken links. ## User Stories As a user, clicking on a link and getting a 404 can be a frustrating experience.
Great idea! I'll take a look into this— Could it be run as part of `make docs-lint`? possibly, how long does this take to run on the current docs? Not sure. Haven't been able to check with my current environment problems (#4472). Will try to test this using a previous docker image. Time to run sphinx's linkcheck feature is at least a few minutes. I'm at 5 minutes right now on an iffy connection, 70% done. Any objections to making this its own make target, `make docs-linkcheck`? Would be useful for those regularly working on docs as a whole. yeah making it a separate target from `docs-lint` is a good call given the time to run :+1:
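For anyone wiring this up, the linkcheck builder can also be invoked directly, which is what a `docs-linkcheck` make target would presumably wrap (a minimal sketch; the output directory is arbitrary):

```python
# Minimal sketch: run Sphinx's linkcheck builder against the docs tree.
import subprocess

subprocess.check_call(
    ['sphinx-build', '-b', 'linkcheck', 'docs', 'docs/_build/linkcheck']
)
```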
2019-05-29T23:49:15Z
[]
[]
freedomofpress/securedrop
4,493
freedomofpress__securedrop-4493
[ "4441" ]
52ae0228b0b02def3790dcd360fec7c09c29a207
diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py --- a/securedrop/i18n_tool.py +++ b/securedrop/i18n_tool.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +from __future__ import print_function import argparse import io import six @@ -201,7 +202,7 @@ def require_git_email_name(git_dir): 'git -C {d} config --get user.email > /dev/null'.format( d=git_dir)) if subprocess.call(cmd, shell=True): # nosec - if six.u('docker') in io.open('/proc/1/cgroup').read(): + if u'docker' in io.open('/proc/1/cgroup').read(): log.error("remember ~/.gitconfig does not exist " "in the dev-shell Docker container, " "only .git/config does") @@ -209,7 +210,7 @@ def require_git_email_name(git_dir): return True def update_docs(self, args): - l10n_content = six.u('.. GENERATED BY i18n_tool.py DO NOT EDIT:\n\n') + l10n_content = u'.. GENERATED BY i18n_tool.py DO NOT EDIT:\n\n' for (code, info) in sorted(I18NTool.SUPPORTED_LANGUAGES.items()): l10n_content += '* ' + info['name'] + ' (``' + code + '``)\n' includes = join(args.documentation_dir, 'includes') @@ -243,7 +244,7 @@ def update_from_weblate(self, args): codes = list(I18NTool.SUPPORTED_LANGUAGES.keys()) if args.supported_languages: codes = args.supported_languages.split(',') - for code in codes: + for code in sorted(codes): info = I18NTool.SUPPORTED_LANGUAGES[code] def need_update(p): @@ -282,40 +283,70 @@ def add(p): if updated: self.upstream_commit(args, code) + def translators(self, args, path, range): + """ + Return the set of people who've modified a file in Weblate. + + Extracts all the authors of translation changes to the given + path in the given range. Translation changes are identified by + the presence of "Translated using Weblate" in the commit + message. + """ + translation_re = re.compile('Translated using Weblate') + + path_changes = git( + '--no-pager', '-C', args.root, + 'log', '--format=%aN\x1e%s', range, '--', path, + _encoding='utf-8' + ) + path_changes = u"{}".format(path_changes) + path_changes = [c.split('\x1e') for c in path_changes.strip().split('\n')] + path_changes = [c for c in path_changes if len(c) > 1 and translation_re.match(c[1])] + + path_authors = [c[0] for c in path_changes] + return set(path_authors) + def upstream_commit(self, args, code): self.require_git_email_name(args.root) authors = set() - diffs = six.text_type(git('--no-pager', '-C', args.root, - 'diff', '--name-only', '--cached').stdout) - for path in diffs.strip().split('\n'): - previous_message = str(git( + diffs = u"{}".format(git('--no-pager', '-C', args.root, 'diff', '--name-only', '--cached')) + + # This change will need to be checked in the next translation + # cycle after 0.13.0, to ensure that only recent commits are + # being picked up here. 
+ update_re = re.compile(r'(?:copied from| revision:) (\w+)') + + for path in sorted(diffs.strip().split('\n')): + previous_message = u"{}".format(git( '--no-pager', '-C', args.root, 'log', '-n', '1', path, _encoding='utf-8')) - m = re.search('copied from (\w+)', previous_message) + m = update_re.search(previous_message) if m: origin = m.group(1) else: origin = '' - git_authors = str(git( - '--no-pager', '-C', args.root, 'log', '--format=%aN', - '{}..i18n/i18n'.format(origin), '--', - path, _encoding='utf-8')) - git_authors = git_authors.strip().split('\n') - authors |= set(git_authors) - current = git('-C', args.root, 'rev-parse', 'i18n/i18n').stdout + authors |= self.translators(args, path, '{}..i18n/i18n'.format(origin)) + + authors = u"\n ".join(sorted(authors)) + + current = git('-C', args.root, 'rev-parse', 'i18n/i18n') info = I18NTool.SUPPORTED_LANGUAGES[code] - message = textwrap.dedent(six.u(""" - l10n: updated {code} {name} - - localizers: {authors} - - {remote} - copied from {current} - """.format(remote=args.url, - name=info['name'], - authors=", ".join(authors), - code=code, - current=current))) + message = textwrap.dedent(u""" + l10n: updated {name} ({code}) + + contributors: + {authors} + + updated from: + repo: {remote} + revision: {current} + """).format( + remote=args.url, + name=info['name'], + authors=authors, + code=code, + current=current + ) git('-C', args.root, 'commit', '-m', message) def set_update_from_weblate_parser(self, subps): @@ -338,6 +369,62 @@ def set_update_from_weblate_parser(self, subps): help='comma separated list of supported languages') parser.set_defaults(func=self.update_from_weblate) + def set_list_translators_parser(self, subps): + parser = subps.add_parser('list-translators', + help=('List contributing translators')) + root = join(dirname(realpath(__file__)), '..') + parser.add_argument( + '--root', + default=root, + help=('root of the SecureDrop git repository' + ' (default {})'.format(root))) + url = 'https://github.com/freedomofpress/securedrop-i18n' + parser.add_argument( + '--url', + default=url, + help=('URL of the weblate repository' + ' (default {})'.format(url))) + parser.add_argument( + '--all', + action="store_true", + help="List everyone who's ever contributed." 
+ ) + parser.set_defaults(func=self.list_translators) + + def list_translators(self, args): + self.ensure_i18n_remote(args) + codes = list(I18NTool.SUPPORTED_LANGUAGES.keys()) + path_templates = [ + "install_files/ansible-base/roles/tails-config/templates/{}.po", + "securedrop/translations/{}/LC_MESSAGES/messages.po", + ] + update_re = re.compile(r'(?:copied from| revision:) (\w+)') + for code in sorted(codes): + translators = set([]) + info = I18NTool.SUPPORTED_LANGUAGES[code] + paths = [os.path.join(args.root, t.format(code)) for t in path_templates] + for path in paths: + if not os.path.exists(path): + print("Skipping non-existent .po file: {}".format(path), file=sys.stderr) + continue + try: + range = "i18n/i18n" + if not args.all: + previous_message = u"{}".format(git( + '--no-pager', '-C', args.root, 'log', '-n', '1', path, + _encoding='utf-8')) + m = update_re.search(previous_message) + if m: + origin = m.group(1) + else: + origin = '' + range = '{}..i18n/i18n'.format(origin) + t = self.translators(args, path, range) + translators.update(t) + except Exception as e: + print("Could not check git history of {}: {}".format(path, e), file=sys.stderr) + print(u"{} ({}):\n {}".format(code, info["name"], "\n ".join(sorted(translators)))) + def get_args(self): parser = argparse.ArgumentParser( prog=__file__, @@ -349,6 +436,7 @@ def get_args(self): self.set_translate_desktop_parser(subps) self.set_update_docs_parser(subps) self.set_update_from_weblate_parser(subps) + self.set_list_translators_parser(subps) return parser
diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py --- a/securedrop/tests/test_i18n_tool.py +++ b/securedrop/tests/test_i18n_tool.py @@ -284,8 +284,8 @@ def r(): '--url', join(str(tmpdir), 'i18n'), '--supported-languages', 'nl', ]) - assert 'l10n: updated nl' in r() - assert 'l10n: updated de_DE' not in r() + assert 'l10n: updated Dutch (nl)' in r() + assert 'l10n: updated German (de_DE)' not in r() # # de_DE is added but there is no change in the nl translation @@ -299,8 +299,8 @@ def r(): '--url', join(str(tmpdir), 'i18n'), '--supported-languages', 'nl,de_DE', ]) - assert 'l10n: updated nl' not in r() - assert 'l10n: updated de_DE' in r() + assert 'l10n: updated Dutch (nl)' not in r() + assert 'l10n: updated German (de_DE)' in r() # # nothing new for nl or de_DE: nothing is done @@ -313,8 +313,8 @@ def r(): '--url', join(str(tmpdir), 'i18n'), '--supported-languages', 'nl,de_DE', ]) - assert 'l10n: updated nl' not in r() - assert 'l10n: updated de_DE' not in r() + assert 'l10n: updated Dutch (nl)' not in r() + assert 'l10n: updated German (de_DE)' not in r() message = six.text_type(git('--no-pager', '-C', 'securedrop', 'show', _cwd=d, _encoding='utf-8')) assert six.u("Loïc") in message @@ -346,8 +346,8 @@ def r(): '--url', join(str(tmpdir), 'i18n'), '--supported-languages', 'nl,de_DE', ]) - assert 'l10n: updated nl' in r() - assert 'l10n: updated de_DE' not in r() + assert 'l10n: updated Dutch (nl)' in r() + assert 'l10n: updated German (de_DE)' not in r() message = six.text_type(git('--no-pager', '-C', 'securedrop', 'show', _cwd=d)) assert "Someone Else" in message
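As a usage note for the patch above: once the `i18n` remote is available, the new subcommand could be driven like this (a sketch; it assumes it is run from the repository root with a reachable Weblate remote):

```python
# Sketch: invoke the `list-translators` subcommand added by this patch.
import subprocess

subprocess.check_call(
    ['python', 'securedrop/i18n_tool.py', 'list-translators', '--all']
)
```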
Update localization management guide for recent changes to string freeze ## Description We combined string and feature freeze, so we need to update the documentation here: https://docs.securedrop.org/en/release-0.12.2/development/i18n.html#release-management We should combine the tasks in the two sections, as well as remove the task "Backport every commit changing a source string to the release branch"
Also the link for "Go to the Weblate commit page for SecureDrop" in the docs should be https://weblate.securedrop.org/projects/securedrop/securedrop/#information
2019-05-31T17:52:39Z
[]
[]
freedomofpress/securedrop
4,496
freedomofpress__securedrop-4496
[ "4490" ]
d164ece81cd9d3c120954b5e6664abcb27664ac4
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -145,7 +145,9 @@ def setup_g(): # clear the session after we render the message so it's localized session.clear() + # Redirect to index with flashed message flash(Markup(msg), "important") + return redirect(url_for('main.index')) session['expires'] = datetime.utcnow() + \ timedelta(minutes=getattr(config,
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -3,6 +3,7 @@ import re import subprocess import six +import time from io import BytesIO from flask import session, escape, current_app, url_for, g @@ -667,7 +668,33 @@ def test_source_session_expiration(config, source_app): # which is always present and 'csrf_token' which leaks no info) session.pop('expires', None) session.pop('csrf_token', None) - assert not session, session + assert not session + + text = resp.data.decode('utf-8') + assert 'Your session timed out due to inactivity' in text + + +def test_source_session_expiration_create(config, source_app): + with source_app.test_client() as app: + + seconds_session_expire = 1 + config.SESSION_EXPIRATION_MINUTES = seconds_session_expire / 60. + + # Make codename, and then wait for session to expire. + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + + time.sleep(seconds_session_expire + 0.1) + + # Now when we click create, the session will have expired. + resp = app.post(url_for('main.create'), follow_redirects=True) + + # check that the session was cleared (apart from 'expires' + # which is always present and 'csrf_token' which leaks no info) + session.pop('expires', None) + session.pop('csrf_token', None) + assert not session + text = resp.data.decode('utf-8') assert 'Your session timed out due to inactivity' in text
Expired Source Interface session causes server error on new session ## Description Creating a new codename after a session has expired causes a server error. ## Steps to Reproduce 1. Visit the codename generation page (`/generate`) but do not press 'Submit Documents' 2. Wait for the session to expire 3. Press 'Submit Documents' Diff to force session expiry at 1 minute: ```diff diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py index d2679b737..60df38ece 100644 --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -148,9 +148,7 @@ def create_app(config): flash(Markup(msg), "important") session['expires'] = datetime.utcnow() + \ - timedelta(minutes=getattr(config, - 'SESSION_EXPIRATION_MINUTES', - 120)) + timedelta(minutes=1) # ignore_static here because `crypto_util.hash_codename` is scrypt # (very time consuming), and we don't need to waste time running if ``` ## Expected Behavior A new codename is generated or an appropriate error message is shown. ## Actual Behavior The Source Interface displays: > Server error > Sorry, the website encountered an error and was unable to complete your request.
See also the similar #4458 which may have the same root cause. I've only reproduced this once as I haven't mucked around with forcing session expiry yet. After forcing session expiration to be one minute, I can't reproduce this - are there any additional STR? Thanks for the repro attempt, will try a clean repro as well and document exact steps here if successful. One way to get the error appears to be to wait with attempting to create the codename _until_ the expiry of the session. I've updated the issue accordingly, and added the diff I used for forcing session expiry. Here's the traceback from the dev env: ``` KeyError KeyError: 'codename' Traceback (most recent call last) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2309, in __call__ return self.wsgi_app(environ, start_response) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2295, in wsgi_app response = self.handle_exception(e) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1741, in handle_exception reraise(exc_type, exc_value, tb) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 2292, in wsgi_app response = self.full_dispatch_request() File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1815, in full_dispatch_request rv = self.handle_user_exception(e) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1718, in handle_user_exception reraise(exc_type, exc_value, tb) File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1813, in full_dispatch_request rv = self.dispatch_request() File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1799, in dispatch_request return self.view_functions[rule.endpoint](**req.view_args) File "/home/erik/Code/securedrop/securedrop/source_app/main.py", line 56, in create session['codename']) File "/usr/local/lib/python2.7/dist-packages/werkzeug/local.py", line 377, in <lambda> __getitem__ = lambda x, i: x._get_current_object()[i] File "/usr/local/lib/python2.7/dist-packages/flask/sessions.py", line 83, in __getitem__ return super(SecureCookieSession, self).__getitem__(key) KeyError: 'codename' ``` I can reproduce this behavior in `develop`, on the 0.13.0 release branch, and at commit c24f61f7408b7e8f412d14ea3eab441178171938 (prior to fix for #4361).
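The traceback above comes from an unguarded `session['codename']` lookup in the `/create` view. A defensive variant is sketched below for illustration only; the merged fix (see the patch in the issue body) instead clears the session, flashes a message, and redirects as soon as expiry is detected:

```python
# Illustrative guard for the KeyError above; function body is a fragment.
from flask import redirect, session, url_for

def create():
    codename = session.get('codename')
    if codename is None:
        # Session expired between /generate and /create; start over at the index.
        return redirect(url_for('main.index'))
    # ... continue with normal source creation ...
```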
2019-05-31T23:04:17Z
[]
[]
freedomofpress/securedrop
4,523
freedomofpress__securedrop-4523
[ "4521" ]
87d692fbca97e493181040d8ae7cb09d86adea1f
diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py --- a/securedrop/source_app/info.py +++ b/securedrop/source_app/info.py @@ -1,8 +1,12 @@ # -*- coding: utf-8 -*- - -from io import StringIO +import six from flask import Blueprint, render_template, send_file, current_app +if six.PY2: + from cStringIO import StringIO # noqa +else: + from io import BytesIO # noqa + def make_blueprint(config): view = Blueprint('info', __name__) @@ -19,7 +23,11 @@ def recommend_tor_browser(): def download_journalist_pubkey(): journalist_pubkey = current_app.crypto_util.gpg.export_keys( config.JOURNALIST_KEY) - return send_file(StringIO(journalist_pubkey), + if six.PY2: + data = StringIO(journalist_pubkey) + else: + data = BytesIO(journalist_pubkey.encode('utf-8')) + return send_file(data, mimetype="application/pgp-keys", attachment_filename=config.JOURNALIST_KEY + ".asc", as_attachment=True)
diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py
--- a/securedrop/tests/functional/test_source.py
+++ b/securedrop/tests/functional/test_source.py
@@ -1,5 +1,6 @@
-from . import source_navigation_steps
+from . import source_navigation_steps, journalist_navigation_steps
 from . import functional_test
+import six
 
 
 class TestSourceInterface(
@@ -17,3 +18,16 @@ def test_lookup_codename_hint(self):
         self._source_chooses_to_login()
         self._source_proceeds_to_login()
         self._source_sees_no_codename()
+
+
+class TestDownloadKey(
+        functional_test.FunctionalTest,
+        journalist_navigation_steps.JournalistNavigationStepsMixin):
+
+    def test_journalist_key_from_source_interface(self):
+        data = self.return_downloaded_content(self.source_location +
+                                              "/journalist-key", None)
+
+        if six.PY3:
+            data = data.decode('utf-8')
+        assert "BEGIN PGP PUBLIC KEY BLOCK" in data
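Alongside the functional test above, a lighter-weight check is possible with Flask's test client; a minimal sketch, assuming the `create_app(config)` factory from `source_app` (seen in the diff earlier in this document) and a valid test `config` fixture:

```python
# Sketch only: hit /journalist-key through the WSGI app directly,
# without a browser. source_app.create_app and the config fixture are
# assumptions based on the diffs in this document.
from source_app import create_app


def test_journalist_key_download(config):
    app = create_app(config)
    with app.test_client() as client:
        resp = client.get("/journalist-key")
        assert resp.status_code == 200
        assert b"BEGIN PGP PUBLIC KEY BLOCK" in resp.data
```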
The Source Interface's /journalist-key endpoint returns a 404 instead of the Submission Key

## Description

The submission key should be available via the Source Interface at the URL http://[Onion address]/journalist-key, but said URL returns a 404 instead.

## Steps to Reproduce

- set up a production SecureDrop instance on HW or VMs
- visit the source interface, click through to the submission page, and click the ``public key`` link.

## Expected Behavior

The Source Interface responds with the Submission Key.

## Actual Behavior

The Source Interface responds with a 404, and the following errors are recorded if logging is enabled:

```
[Thu Jun 13 17:22:12.285962 2019] [authz_core:debug] [pid 7030:tid 3881911650048] mod_authz_core.c(835): [client 127.0.0.1:45900] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/lookup
[Thu Jun 13 17:22:12.286129 2019] [authz_core:debug] [pid 7030:tid 3881911650048] mod_authz_core.c(835): [client 127.0.0.1:45900] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/lookup
[Thu Jun 13 17:22:12.415949 2019] [wsgi:error] [pid 7028:tid 3882000488192] [remote 127.0.0.1:20954] mod_wsgi (pid=7028): Exception occurred processing WSGI script '/var/www/source.wsgi'.
[Thu Jun 13 17:22:12.416036 2019] [wsgi:error] [pid 7028:tid 3882000488192] [remote 127.0.0.1:20954] TypeError: file like object yielded non string type
[Thu Jun 13 17:22:12.416281 2019] [authz_core:debug] [pid 7030:tid 3881911650048] mod_authz_core.c(835): [client 127.0.0.1:45900] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/lookup
[Thu Jun 13 17:22:12.416360 2019] [authz_core:debug] [pid 7030:tid 3881911650048] mod_authz_core.c(835): [client 127.0.0.1:45900] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/lookup
[Thu Jun 13 17:22:13.812032 2019] [authz_core:debug] [pid 7031:tid 3882000144128] mod_authz_core.c(835): [client 127.0.0.1:45902] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/journalist-key
[Thu Jun 13 17:22:14.587228 2019] [authz_core:debug] [pid 7031:tid 3881991599872] mod_authz_core.c(835): [client 127.0.0.1:45902] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/journalist-key
[Thu Jun 13 17:22:14.587409 2019] [authz_core:debug] [pid 7031:tid 3881991599872] mod_authz_core.c(835): [client 127.0.0.1:45902] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/journalist-key
[Thu Jun 13 17:22:15.296269 2019] [authz_core:debug] [pid 7031:tid 3881911961344] mod_authz_core.c(835): [client 127.0.0.1:45902] AH01628: authorization result: granted (no directives), referer: http://fehpa6pmg6ke4i3u.onion/journalist-key
```

Please provide screenshots where appropriate.

## Comments

Suggestions to fix, any other relevant information.
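The `TypeError: file like object yielded non string type` in the logs is the crux: a WSGI response body must yield bytes, and a text stream like `io.StringIO` yields text, which is why the patch above switches to `BytesIO` (or `cStringIO` on Python 2). A self-contained sketch of the working shape, using the Flask 1.x `send_file` signature from the patch and a placeholder key instead of a real GPG export:

```python
# Illustrative sketch of the fix above: stream bytes, not text, from
# send_file. The key material is a placeholder, not a real export.
from io import BytesIO

from flask import Flask, send_file

app = Flask(__name__)

PUBKEY = (
    "-----BEGIN PGP PUBLIC KEY BLOCK-----\n"
    "...\n"
    "-----END PGP PUBLIC KEY BLOCK-----\n"
)


@app.route("/journalist-key")
def download_journalist_pubkey():
    # encode() turns the ASCII-armored text into bytes before streaming,
    # satisfying WSGI's bytes-only requirement.
    return send_file(BytesIO(PUBKEY.encode("utf-8")),
                     mimetype="application/pgp-keys",
                     attachment_filename="journalist.asc",
                     as_attachment=True)
```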
2019-06-14T11:08:11Z
[]
[]
freedomofpress/securedrop
4,544
freedomofpress__securedrop-4544
[ "4415" ]
82d4cd2687916fe773916a31db7bf1f36fe4744b
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -79,7 +79,7 @@ def validate(self, document): class ValidateIP(Validator): def validate(self, document): - if re.match('((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', + if re.match(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', document.text): return True raise ValidationError( @@ -176,7 +176,7 @@ def validate(self, document): class ValidateInt(Validator): def validate(self, document): - if re.match('\d+$', document.text): + if re.match(r'\d+$', document.text): return True raise ValidationError(message="Must be an integer") @@ -443,7 +443,7 @@ def validated_input(self, prompt, default, validator, transform): if validator: kwargs['validator'] = validator value = prompt_toolkit.prompt(prompt, - default=unicode(default, 'utf-8'), + default=default.decode('utf-8'), **kwargs) if transform: return transform(value) @@ -702,7 +702,7 @@ def update(args): cwd=args.root) sdlog.info("Signature verification failed.") return 1 - except subprocess.CalledProcessError, e: + except subprocess.CalledProcessError as e: if 'not a valid ref' in e.output: # Then there is no duplicate branch. sdlog.info("Signature verification successful.") diff --git a/devops/scripts/combine-junit.py b/devops/scripts/combine-junit.py --- a/devops/scripts/combine-junit.py +++ b/devops/scripts/combine-junit.py @@ -61,7 +61,7 @@ def merge_results(xml_files): def usage(): this_file = os.path.basename(__file__) - print 'Usage: %s results1.xml results2.xml' % this_file + print('Usage: %s results1.xml results2.xml' % this_file) if __name__ == '__main__': diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py --- a/install_files/ansible-base/roles/backup/files/0.3_collect.py +++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py @@ -74,7 +74,7 @@ def main(): collect_custom_header_image(zf) collect_tor_files(zf) encrypt_zip_file(zf_fn) - print zf_fn + print(zf_fn) if __name__ == "__main__": diff --git a/install_files/ansible-base/roles/backup/files/backup.py b/install_files/ansible-base/roles/backup/files/backup.py --- a/install_files/ansible-base/roles/backup/files/backup.py +++ b/install_files/ansible-base/roles/backup/files/backup.py @@ -31,7 +31,7 @@ def main(): backup.add(tor_hidden_services) backup.add(torrc) - print backup_filename + print(backup_filename) if __name__ == "__main__": diff --git a/journalist_gui/journalist_gui/resources_rc.py b/journalist_gui/journalist_gui/resources_rc.py --- a/journalist_gui/journalist_gui/resources_rc.py +++ b/journalist_gui/journalist_gui/resources_rc.py @@ -873,18 +873,25 @@ \x00\x00\x01\x62\xd9\x46\xbd\x12\ " -qt_version = QtCore.qVersion().split('.') -if qt_version < ['5', '8', '0']: +qt_version = QtCore.qVersion().split(".") +if qt_version < ["5", "8", "0"]: rcc_version = 1 qt_resource_struct = qt_resource_struct_v1 else: rcc_version = 2 qt_resource_struct = qt_resource_struct_v2 + def qInitResources(): - QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) + QtCore.qRegisterResourceData( + rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data + ) + def qCleanupResources(): - QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) + QtCore.qUnregisterResourceData( + rcc_version, qt_resource_struct, qt_resource_name, 
qt_resource_data + ) + qInitResources() diff --git a/journalist_gui/journalist_gui/updaterUI.py b/journalist_gui/journalist_gui/updaterUI.py --- a/journalist_gui/journalist_gui/updaterUI.py +++ b/journalist_gui/journalist_gui/updaterUI.py @@ -8,6 +8,7 @@ from PyQt5 import QtCore, QtGui, QtWidgets + class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") @@ -24,7 +25,9 @@ def setupUi(self, MainWindow): self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.centralwidget) self.verticalLayout_3.setObjectName("verticalLayout_3") self.label_2 = QtWidgets.QLabel(self.centralwidget) - sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed) + sizePolicy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed + ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth()) @@ -41,7 +44,9 @@ def setupUi(self, MainWindow): self.verticalLayout = QtWidgets.QVBoxLayout(self.tab) self.verticalLayout.setObjectName("verticalLayout") self.label = QtWidgets.QLabel(self.tab) - sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred) + sizePolicy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred + ) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth()) @@ -91,9 +96,20 @@ def setupUi(self, MainWindow): def retranslateUi(self, MainWindow): _translate = QtCore.QCoreApplication.translate MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) - self.label.setText(_translate("MainWindow", "SecureDrop workstation updates are available! You should install them now. If you don\'t want to, you can install them the next time your system boots.")) - self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "SecureDrop")) - self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Command Output")) + self.label.setText( + _translate( + "MainWindow", + ( + "SecureDrop workstation updates are available! You should install them now. " + "If you don't want to, you can install them the next time your system boots." + ), + ) + ) + self.tabWidget.setTabText( + self.tabWidget.indexOf(self.tab), _translate("MainWindow", "SecureDrop") + ) + self.tabWidget.setTabText( + self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Command Output") + ) self.pushButton.setText(_translate("MainWindow", "Install Later")) self.pushButton_2.setText(_translate("MainWindow", "Install Now")) - diff --git a/molecule/vagrant-packager/package.py b/molecule/vagrant-packager/package.py --- a/molecule/vagrant-packager/package.py +++ b/molecule/vagrant-packager/package.py @@ -115,10 +115,10 @@ def vagrant_metadata(self, img_location): info_output = subprocess.check_output(["qemu-img", "info", img_location]).decode('utf-8') - json['virtual_size'] = int((re.search("virtual size: (?P<size>\d+)G", + json['virtual_size'] = int((re.search(r"virtual size: (?P<size>\d+)G", info_output)).group("size")) - json['format'] = (re.search("file format: (?P<format>\w+)", + json['format'] = (re.search(r"file format: (?P<format>\w+)", info_output)).group("format") json['provider'] = 'libvirt' @@ -219,7 +219,7 @@ def update_box_metadata(server_name, box_file, platform, version): version number, and SHA256 checksum. 
""" # Strip off "staging" suffix from box names - server_name_short = re.sub('\-staging$', '', server_name) + server_name_short = re.sub(r'\-staging$', '', server_name) json_file_basename = "{}_{}_metadata.json".format(server_name_short, platform) json_file = os.path.join(BOX_METADATA_DIR, json_file_basename) diff --git a/securedrop/alembic/env.py b/securedrop/alembic/env.py --- a/securedrop/alembic/env.py +++ b/securedrop/alembic/env.py @@ -24,7 +24,7 @@ # App context is needed for autogenerated migrations create_app(sdconfig).app_context().push() -except Exception as e: +except Exception: # Only reraise the exception in 'dev' where a developer actually cares if os.environ.get('SECUREDROP_ENV') == 'dev': raise diff --git a/securedrop/alembic/versions/15ac9509fc68_init.py b/securedrop/alembic/versions/15ac9509fc68_init.py --- a/securedrop/alembic/versions/15ac9509fc68_init.py +++ b/securedrop/alembic/versions/15ac9509fc68_init.py @@ -1,7 +1,7 @@ """init Revision ID: 15ac9509fc68 -Revises: +Revises: Create Date: 2018-03-30 21:20:58.280753 """ @@ -10,78 +10,84 @@ # revision identifiers, used by Alembic. -revision = '15ac9509fc68' +revision = "15ac9509fc68" down_revision = None branch_labels = None depends_on = None def upgrade(): - op.create_table('journalists', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('pw_salt', sa.Binary(), nullable=True), - sa.Column('pw_hash', sa.Binary(), nullable=True), - sa.Column('is_admin', sa.Boolean(), nullable=True), - sa.Column('otp_secret', sa.String(length=16), nullable=True), - sa.Column('is_totp', sa.Boolean(), nullable=True), - sa.Column('hotp_counter', sa.Integer(), nullable=True), - sa.Column('last_token', sa.String(length=6), nullable=True), - sa.Column('created_on', sa.DateTime(), nullable=True), - sa.Column('last_access', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('username') + op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), ) - op.create_table('sources', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('filesystem_id', sa.String(length=96), nullable=True), - sa.Column('journalist_designation', sa.String(length=255), nullable=False), - sa.Column('flagged', sa.Boolean(), nullable=True), - sa.Column('last_updated', sa.DateTime(), nullable=True), - sa.Column('pending', sa.Boolean(), nullable=True), - sa.Column('interaction_count', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('filesystem_id') + op.create_table( + "sources", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("filesystem_id", sa.String(length=96), nullable=True), + sa.Column("journalist_designation", sa.String(length=255), nullable=False), + sa.Column("flagged", sa.Boolean(), nullable=True), + sa.Column("last_updated", sa.DateTime(), 
nullable=True), + sa.Column("pending", sa.Boolean(), nullable=True), + sa.Column("interaction_count", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("filesystem_id"), ) - op.create_table('journalist_login_attempt', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('timestamp', sa.DateTime(), nullable=True), - sa.Column('journalist_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "journalist_login_attempt", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("timestamp", sa.DateTime(), nullable=True), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('replies', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('journalist_id', sa.Integer(), nullable=True), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('source_stars', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('starred', sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "source_stars", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("starred", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('submissions', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.Column('downloaded', sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "submissions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("downloaded", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), ) def downgrade(): - op.drop_table('submissions') - op.drop_table('source_stars') - op.drop_table('replies') - op.drop_table('journalist_login_attempt') - op.drop_table('sources') - op.drop_table('journalists') + op.drop_table("submissions") + op.drop_table("source_stars") + op.drop_table("replies") + op.drop_table("journalist_login_attempt") + op.drop_table("sources") + op.drop_table("journalists") diff --git 
a/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py --- a/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py +++ b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py @@ -1,7 +1,7 @@ """added passphrase_hash column to journalists table Revision ID: 2d0ce3ee5bdc -Revises: fccf57ceef02 +Revises: fccf57ceef02 Create Date: 2018-06-08 15:08:37.718268 """ @@ -10,44 +10,47 @@ # revision identifiers, used by Alembic. -revision = '2d0ce3ee5bdc' -down_revision = 'fccf57ceef02' +revision = "2d0ce3ee5bdc" +down_revision = "fccf57ceef02" branch_labels = None depends_on = None def upgrade(): - op.add_column('journalists', sa.Column('passphrase_hash', sa.String(length=256), nullable=True)) + op.add_column("journalists", sa.Column("passphrase_hash", sa.String(length=256), nullable=True)) def downgrade(): # sqlite has no `drop column` command, so we recreate the original table # then load it from a temp table - op.rename_table('journalists', 'journalists_tmp') - - op.create_table('journalists', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('pw_salt', sa.Binary(), nullable=True), - sa.Column('pw_hash', sa.Binary(), nullable=True), - sa.Column('is_admin', sa.Boolean(), nullable=True), - sa.Column('otp_secret', sa.String(length=16), nullable=True), - sa.Column('is_totp', sa.Boolean(), nullable=True), - sa.Column('hotp_counter', sa.Integer(), nullable=True), - sa.Column('last_token', sa.String(length=6), nullable=True), - sa.Column('created_on', sa.DateTime(), nullable=True), - sa.Column('last_access', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('username') + op.rename_table("journalists", "journalists_tmp") + + op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), ) conn = op.get_bind() - conn.execute(''' + conn.execute( + """ INSERT INTO journalists SELECT id, username, pw_salt, pw_hash, is_admin, otp_secret, is_totp, hotp_counter, last_token, created_on, last_access FROM journalists_tmp - ''') - - op.drop_table('journalists_tmp') + """ + ) + + op.drop_table("journalists_tmp") diff --git a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py --- a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py +++ b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py @@ -8,22 +8,21 @@ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import quoted_name -import subprocess import uuid # revision identifiers, used by Alembic. 
-revision = '3d91d6948753' -down_revision = 'faac8092c123' +revision = "3d91d6948753" +down_revision = "faac8092c123" branch_labels = None depends_on = None def upgrade(): # Schema migration - op.rename_table('sources', 'sources_tmp') + op.rename_table("sources", "sources_tmp") # Add UUID column. - op.add_column('sources_tmp', sa.Column('uuid', sa.String(length=36))) + op.add_column("sources_tmp", sa.Column("uuid", sa.String(length=36))) # Add UUIDs to sources_tmp table. conn = op.get_bind() @@ -31,39 +30,42 @@ def upgrade(): for source in sources: conn.execute( - sa.text("""UPDATE sources_tmp SET uuid=:source_uuid WHERE - id=:id""").bindparams(source_uuid=str(uuid.uuid4()), - id=source.id) - ) + sa.text( + """UPDATE sources_tmp SET uuid=:source_uuid WHERE + id=:id""" + ).bindparams(source_uuid=str(uuid.uuid4()), id=source.id) + ) # Now create new table with unique constraint applied. - op.create_table(quoted_name('sources', quote=False), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('filesystem_id', sa.String(length=96), nullable=True), - sa.Column('journalist_designation', sa.String(length=255), - nullable=False), - sa.Column('flagged', sa.Boolean(), nullable=True), - sa.Column('last_updated', sa.DateTime(), nullable=True), - sa.Column('pending', sa.Boolean(), nullable=True), - sa.Column('interaction_count', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('uuid'), - sa.UniqueConstraint('filesystem_id') + op.create_table( + quoted_name("sources", quote=False), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("filesystem_id", sa.String(length=96), nullable=True), + sa.Column("journalist_designation", sa.String(length=255), nullable=False), + sa.Column("flagged", sa.Boolean(), nullable=True), + sa.Column("last_updated", sa.DateTime(), nullable=True), + sa.Column("pending", sa.Boolean(), nullable=True), + sa.Column("interaction_count", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + sa.UniqueConstraint("filesystem_id"), ) # Data Migration: move all sources into the new table. - conn.execute(''' + conn.execute( + """ INSERT INTO sources SELECT id, uuid, filesystem_id, journalist_designation, flagged, last_updated, pending, interaction_count FROM sources_tmp - ''') + """ + ) # Now delete the old table. - op.drop_table('sources_tmp') + op.drop_table("sources_tmp") def downgrade(): - with op.batch_alter_table('sources', schema=None) as batch_op: - batch_op.drop_column('uuid') + with op.batch_alter_table("sources", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py --- a/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py +++ b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py @@ -11,60 +11,63 @@ import uuid # revision identifiers, used by Alembic. -revision = '6db892e17271' -down_revision = 'e0a525cbab83' +revision = "6db892e17271" +down_revision = "e0a525cbab83" branch_labels = None depends_on = None def upgrade(): # Schema migration - op.rename_table('replies', 'replies_tmp') + op.rename_table("replies", "replies_tmp") # Add new column. 
- op.add_column('replies_tmp', sa.Column('uuid', sa.String(length=36))) + op.add_column("replies_tmp", sa.Column("uuid", sa.String(length=36))) # Populate new column in replies_tmp table. conn = op.get_bind() - replies = conn.execute( - sa.text("SELECT * FROM replies_tmp")).fetchall() + replies = conn.execute(sa.text("SELECT * FROM replies_tmp")).fetchall() for reply in replies: conn.execute( - sa.text("""UPDATE replies_tmp SET uuid=:reply_uuid WHERE - id=:id""").bindparams(reply_uuid=str(uuid.uuid4()), - id=reply.id) - ) + sa.text( + """UPDATE replies_tmp SET uuid=:reply_uuid WHERE + id=:id""" + ).bindparams(reply_uuid=str(uuid.uuid4()), id=reply.id) + ) # Now create new table with constraints applied to UUID column. - op.create_table('replies', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('journalist_id', sa.Integer(), nullable=True), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.Column('deleted_by_source', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('uuid'), + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("deleted_by_source", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), ) # Data Migration: move all replies into the new table. - conn.execute(''' + conn.execute( + """ INSERT INTO replies SELECT id, uuid, journalist_id, source_id, filename, size, deleted_by_source FROM replies_tmp - ''') + """ + ) # Now delete the old table. - op.drop_table('replies_tmp') + op.drop_table("replies_tmp") def downgrade(): - with op.batch_alter_table('replies', schema=None) as batch_op: - batch_op.drop_column('uuid') + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("uuid") # ### end Alembic commands ### diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py --- a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py @@ -8,7 +8,7 @@ import sqlalchemy as sa # raise the errors if we're not in production -raise_errors = os.environ.get('SECUREDROP_ENV', 'prod') != 'prod' +raise_errors = os.environ.get("SECUREDROP_ENV", "prod") != "prod" try: from journalist_app import create_app @@ -16,32 +16,32 @@ from sdconfig import config from store import queued_add_checksum_for_file from worker import rq_worker_queue -except: +except: # noqa if raise_errors: raise # revision identifiers, used by Alembic. 
-revision = 'b58139cfdc8c' -down_revision = 'f2833ac34bb6' +revision = "b58139cfdc8c" +down_revision = "f2833ac34bb6" branch_labels = None depends_on = None def upgrade(): - with op.batch_alter_table('replies', schema=None) as batch_op: - batch_op.add_column(sa.Column('checksum', sa.String(length=255), nullable=True)) + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.add_column(sa.Column("checksum", sa.String(length=255), nullable=True)) - with op.batch_alter_table('submissions', schema=None) as batch_op: - batch_op.add_column(sa.Column('checksum', sa.String(length=255), nullable=True)) + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.add_column(sa.Column("checksum", sa.String(length=255), nullable=True)) op.create_table( - 'revoked_tokens', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('journalist_id', sa.Integer(), nullable=True), - sa.Column('token', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('token') + "revoked_tokens", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("token", sa.Text(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("token"), ) try: @@ -50,11 +50,13 @@ def upgrade(): # we need an app context for the rq worker extension to work properly with app.app_context(): conn = op.get_bind() - query = sa.text('''SELECT submissions.id, sources.filesystem_id, submissions.filename + query = sa.text( + """SELECT submissions.id, sources.filesystem_id, submissions.filename FROM submissions INNER JOIN sources ON submissions.source_id = sources.id - ''') + """ + ) for (sub_id, filesystem_id, filename) in conn.execute(query): full_path = app.storage.path(filesystem_id, filename) rq_worker_queue.enqueue( @@ -62,14 +64,16 @@ def upgrade(): Submission, int(sub_id), full_path, - app.config['SQLALCHEMY_DATABASE_URI'], + app.config["SQLALCHEMY_DATABASE_URI"], ) - query = sa.text('''SELECT replies.id, sources.filesystem_id, replies.filename + query = sa.text( + """SELECT replies.id, sources.filesystem_id, replies.filename FROM replies INNER JOIN sources ON replies.source_id = sources.id - ''') + """ + ) for (rep_id, filesystem_id, filename) in conn.execute(query): full_path = app.storage.path(filesystem_id, filename) rq_worker_queue.enqueue( @@ -77,18 +81,18 @@ def upgrade(): Reply, int(rep_id), full_path, - app.config['SQLALCHEMY_DATABASE_URI'], + app.config["SQLALCHEMY_DATABASE_URI"], ) - except: + except: # noqa if raise_errors: raise def downgrade(): - op.drop_table('revoked_tokens') + op.drop_table("revoked_tokens") - with op.batch_alter_table('submissions', schema=None) as batch_op: - batch_op.drop_column('checksum') + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.drop_column("checksum") - with op.batch_alter_table('replies', schema=None) as batch_op: - batch_op.drop_column('checksum') + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("checksum") diff --git a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py --- a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py +++ 
b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py @@ -10,56 +10,59 @@ # revision identifiers, used by Alembic. -revision = 'e0a525cbab83' -down_revision = '2d0ce3ee5bdc' +revision = "e0a525cbab83" +down_revision = "2d0ce3ee5bdc" branch_labels = None depends_on = None def upgrade(): # Schema migration - op.rename_table('replies', 'replies_tmp') + op.rename_table("replies", "replies_tmp") # Add new column. - op.add_column('replies_tmp', - sa.Column('deleted_by_source', sa.Boolean())) + op.add_column("replies_tmp", sa.Column("deleted_by_source", sa.Boolean())) # Populate deleted_by_source column in replies_tmp table. conn = op.get_bind() - replies = conn.execute( - sa.text("SELECT * FROM replies_tmp")).fetchall() + replies = conn.execute(sa.text("SELECT * FROM replies_tmp")).fetchall() for reply in replies: conn.execute( - sa.text("""UPDATE replies_tmp SET deleted_by_source=0 WHERE - id=:id""").bindparams(id=reply.id) - ) + sa.text( + """UPDATE replies_tmp SET deleted_by_source=0 WHERE + id=:id""" + ).bindparams(id=reply.id) + ) # Now create new table with not null constraint applied to # deleted_by_source. - op.create_table('replies', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('journalist_id', sa.Integer(), nullable=True), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.Column('deleted_by_source', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("deleted_by_source", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), ) # Data Migration: move all replies into the new table. - conn.execute(''' + conn.execute( + """ INSERT INTO replies SELECT id, journalist_id, source_id, filename, size, deleted_by_source FROM replies_tmp - ''') + """ + ) # Now delete the old table. - op.drop_table('replies_tmp') + op.drop_table("replies_tmp") def downgrade(): - with op.batch_alter_table('replies', schema=None) as batch_op: - batch_op.drop_column('deleted_by_source') + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("deleted_by_source") diff --git a/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py --- a/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py +++ b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py @@ -11,64 +11,67 @@ # revision identifiers, used by Alembic. -revision = 'f2833ac34bb6' -down_revision = '6db892e17271' +revision = "f2833ac34bb6" +down_revision = "6db892e17271" branch_labels = None depends_on = None def upgrade(): # Save existing journalist table. - op.rename_table('journalists', 'journalists_tmp') + op.rename_table("journalists", "journalists_tmp") # Add UUID column. 
- op.add_column('journalists_tmp', sa.Column('uuid', sa.String(length=36))) + op.add_column("journalists_tmp", sa.Column("uuid", sa.String(length=36))) # Add UUIDs to journalists_tmp table. conn = op.get_bind() - journalists = conn.execute( - sa.text("SELECT * FROM journalists_tmp")).fetchall() + journalists = conn.execute(sa.text("SELECT * FROM journalists_tmp")).fetchall() for journalist in journalists: conn.execute( - sa.text("""UPDATE journalists_tmp SET uuid=:journalist_uuid WHERE - id=:id""").bindparams(journalist_uuid=str(uuid.uuid4()), - id=journalist.id) - ) + sa.text( + """UPDATE journalists_tmp SET uuid=:journalist_uuid WHERE + id=:id""" + ).bindparams(journalist_uuid=str(uuid.uuid4()), id=journalist.id) + ) # Now create new table with unique constraint applied. - op.create_table('journalists', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('pw_salt', sa.Binary(), nullable=True), - sa.Column('pw_hash', sa.Binary(), nullable=True), - sa.Column('passphrase_hash', sa.String(length=256), nullable=True), - sa.Column('is_admin', sa.Boolean(), nullable=True), - sa.Column('otp_secret', sa.String(length=16), nullable=True), - sa.Column('is_totp', sa.Boolean(), nullable=True), - sa.Column('hotp_counter', sa.Integer(), nullable=True), - sa.Column('last_token', sa.String(length=6), nullable=True), - sa.Column('created_on', sa.DateTime(), nullable=True), - sa.Column('last_access', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('username'), - sa.UniqueConstraint('uuid') + op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("passphrase_hash", sa.String(length=256), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), + sa.UniqueConstraint("uuid"), ) conn = op.get_bind() - conn.execute(''' + conn.execute( + """ INSERT INTO journalists SELECT id, uuid, username, pw_salt, pw_hash, passphrase_hash, is_admin, otp_secret, is_totp, hotp_counter, last_token, created_on, last_access FROM journalists_tmp - ''') + """ + ) # Now delete the old table. - op.drop_table('journalists_tmp') + op.drop_table("journalists_tmp") def downgrade(): - with op.batch_alter_table('journalists', schema=None) as batch_op: - batch_op.drop_column('uuid') + with op.batch_alter_table("journalists", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py --- a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py +++ b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py @@ -11,55 +11,58 @@ import uuid # revision identifiers, used by Alembic. 
-revision = 'fccf57ceef02' -down_revision = '3d91d6948753' +revision = "fccf57ceef02" +down_revision = "3d91d6948753" branch_labels = None depends_on = None def upgrade(): # Schema migration - op.rename_table('submissions', 'submissions_tmp') + op.rename_table("submissions", "submissions_tmp") # Add UUID column. - op.add_column('submissions_tmp', sa.Column('uuid', sa.String(length=36))) + op.add_column("submissions_tmp", sa.Column("uuid", sa.String(length=36))) # Add UUIDs to submissions_tmp table. conn = op.get_bind() - submissions = conn.execute( - sa.text("SELECT * FROM submissions_tmp")).fetchall() + submissions = conn.execute(sa.text("SELECT * FROM submissions_tmp")).fetchall() for submission in submissions: conn.execute( - sa.text("""UPDATE submissions_tmp SET uuid=:submission_uuid WHERE - id=:id""").bindparams(submission_uuid=str(uuid.uuid4()), - id=submission.id) - ) + sa.text( + """UPDATE submissions_tmp SET uuid=:submission_uuid WHERE + id=:id""" + ).bindparams(submission_uuid=str(uuid.uuid4()), id=submission.id) + ) # Now create new table with unique constraint applied. - op.create_table('submissions', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('filename', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.Column('downloaded', sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('uuid') + op.create_table( + "submissions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("downloaded", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), ) # Data Migration: move all submissions into the new table. - conn.execute(''' + conn.execute( + """ INSERT INTO submissions SELECT id, uuid, source_id, filename, size, downloaded FROM submissions_tmp - ''') + """ + ) # Now delete the old table. - op.drop_table('submissions_tmp') + op.drop_table("submissions_tmp") def downgrade(): - with op.batch_alter_table('submissions', schema=None) as batch_op: - batch_op.drop_column('uuid') + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -38,16 +38,16 @@ def monkey_patch_delete_handle_status(self, key, value): - """Parse a status code from the attached GnuPG process. - :raises: :exc:`~exceptions.ValueError` if the status message is unknown. - """ - if key in ("DELETE_PROBLEM", "KEY_CONSIDERED"): - self.status = self.problem_reason.get(value, "Unknown error: %r" - % value) - elif key in ("PINENTRY_LAUNCHED"): - self.status = key.replace("_", " ").lower() - else: - raise ValueError("Unknown status message: %r" % key) + """ + Parse a status code from the attached GnuPG process. + :raises: :exc:`~exceptions.ValueError` if the status message is unknown. 
+ """ + if key in ("DELETE_PROBLEM", "KEY_CONSIDERED"): + self.status = self.problem_reason.get(value, "Unknown error: %r" % value) + elif key in ("PINENTRY_LAUNCHED"): + self.status = key.replace("_", " ").lower() + else: + raise ValueError("Unknown status message: %r" % key) # Monkey patching to resolve https://github.com/freedomofpress/securedrop/issues/4294 diff --git a/securedrop/i18n.py b/securedrop/i18n.py --- a/securedrop/i18n.py +++ b/securedrop/i18n.py @@ -42,9 +42,7 @@ def setup_app(config, app): translation_dirs = getattr(config, 'TRANSLATION_DIRS', None) if translation_dirs is None: - translation_dirs = \ - path.join(path.dirname(path.realpath(__file__)), - 'translations') + translation_dirs = path.join(path.dirname(path.realpath(__file__)), 'translations') # `babel.translation_directories` is a nightmare # We need to set this manually via an absolute path diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -30,7 +30,6 @@ # statements has to be marked as noqa. # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 from typing import Callable, Optional, Union, Dict, List, Any # noqa: F401 - from tempfile import _TemporaryFileWrapper # noqa: F401 from io import BufferedIOBase # noqa: F401 from logging import Logger # noqa: F401 from sqlalchemy import Query # noqa: F401 @@ -670,10 +669,10 @@ def login(cls, username, password, token): return user def generate_api_token(self, expiration): - # type: (int) -> unicode + # type: (int) -> str s = TimedJSONWebSignatureSerializer( current_app.config['SECRET_KEY'], expires_in=expiration) - return s.dumps({'id': self.id}).decode('ascii') + return s.dumps({'id': self.id}).decode('ascii') # type:ignore @staticmethod def validate_token_is_not_expired_or_invalid(token): diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py --- a/securedrop/secure_tempfile.py +++ b/securedrop/secure_tempfile.py @@ -3,7 +3,7 @@ import os import io import six -from tempfile import _TemporaryFileWrapper +from tempfile import _TemporaryFileWrapper # type: ignore from pretty_bad_protocol._util import _STREAMLIKE_TYPES from cryptography.exceptions import AlreadyFinalized @@ -91,11 +91,7 @@ def write(self, data): self.last_action = 'write' # This is the old Python related code - if six.PY2: # noqa - if isinstance(data, unicode): - data = data.encode('utf-8') - elif isinstance(data, str): # noqa - # For Python 3 + if isinstance(data, six.text_type): data = data.encode('utf-8') self.file.write(self.encryptor.update(data)) diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py --- a/securedrop/source_app/utils.py +++ b/securedrop/source_app/utils.py @@ -65,18 +65,18 @@ def get_entropy_estimate(): return int(f.read()) -def async(f): +def asynchronous(f): def wrapper(*args, **kwargs): thread = Thread(target=f, args=args, kwargs=kwargs) thread.start() return wrapper -@async +@asynchronous def async_genkey(crypto_util_, db_uri, filesystem_id, codename): # We pass in the `crypto_util_` so we don't have to reference `current_app` # here. The app might not have a pushed context during testing which would - # cause this async function to break. + # cause this asynchronous function to break. 
crypto_util_.genkeypair(filesystem_id, codename) # Register key generation as update to the source, so sources will diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -25,15 +25,15 @@ # statements has to be marked as noqa. # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 from typing import List, Type, Union # noqa: F401 - from tempfile import _TemporaryFileWrapper # noqa: F401 + from tempfile import _TemporaryFileWrapper # type: ignore # noqa: F401 from io import BufferedIOBase # noqa: F401 from sqlalchemy.orm import Session # noqa: F401 from models import Reply, Submission # noqa: F401 VALIDATE_FILENAME = re.compile( - "^(?P<index>\d+)\-[a-z0-9-_]*" - "(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match + r"^(?P<index>\d+)\-[a-z0-9-_]*" + r"(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match class PathException(Exception):
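A recurring pattern in the patch above is mechanical Python 2-to-3 cleanup: raw-string prefixes on regex literals and `0o`-prefixed octal integers. A small self-contained illustration of why both changes are needed (these are general Python facts, not SecureDrop-specific code):

```python
# Two of the recurring Python 3 fixes from the diff above, in isolation.
import re

# In Python 3, '\d' inside a plain string literal is an invalid escape
# sequence (a DeprecationWarning since 3.6); the raw string is the
# warning-free spelling and matches the same pattern.
assert re.match(r"\d+$", "42")

# Python 2's bare-zero octal literal (0700) is a SyntaxError in
# Python 3; octal constants must use the 0o prefix.
MODE = 0o700
assert MODE == 448  # 0o700 == 7 * 64
```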
diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -132,34 +132,34 @@ def verify_username_prompt(child): def verify_reboot_prompt(child): child.expect( - "Daily reboot time of the server \(24\-hour clock\):", timeout=2) + r"Daily reboot time of the server \(24\-hour clock\):", timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' 4' # Expected default def verify_ipv4_appserver_prompt(child): - child.expect('Local IPv4 address for the Application Server\:', timeout=2) + child.expect(r'Local IPv4 address for the Application Server\:', timeout=2) # Expected default assert ANSI_ESCAPE.sub('', child.buffer) == ' 10.20.2.2' def verify_ipv4_monserver_prompt(child): - child.expect('Local IPv4 address for the Monitor Server\:', timeout=2) + child.expect(r'Local IPv4 address for the Monitor Server\:', timeout=2) # Expected default assert ANSI_ESCAPE.sub('', child.buffer) == ' 10.20.3.2' def verify_hostname_app_prompt(child): - child.expect('Hostname for Application Server\:', timeout=2) + child.expect(r'Hostname for Application Server\:', timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' app' # Expected default def verify_hostname_mon_prompt(child): - child.expect('Hostname for Monitor Server\:', timeout=2) + child.expect(r'Hostname for Monitor Server\:', timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' mon' # Expected default def verify_dns_prompt(child): - child.expect('DNS server specified during installation\:', timeout=2) + child.expect(r'DNS server specified during installation\:', timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' 8.8.8.8' # Expected default @@ -172,15 +172,15 @@ def verify_https_prompt(child): def verify_https_cert_prompt(child): - child.expect('Local filepath to HTTPS certificate\:', timeout=2) + child.expect(r'Local filepath to HTTPS certificate\:', timeout=2) def verify_https_cert_key_prompt(child): - child.expect('Local filepath to HTTPS certificate key\:', timeout=2) + child.expect(r'Local filepath to HTTPS certificate key\:', timeout=2) def verify_https_cert_chain_file_prompt(child): - child.expect('Local filepath to HTTPS certificate chain file\:', timeout=2) + child.expect(r'Local filepath to HTTPS certificate chain file\:', timeout=2) # noqa: E501 def verify_app_gpg_fingerprint_prompt(child): @@ -212,32 +212,32 @@ def verify_journalist_email_prompt(child): def verify_smtp_relay_prompt(child): - child.expect('SMTP relay for sending OSSEC alerts\:', timeout=2) + child.expect(r'SMTP relay for sending OSSEC alerts\:', timeout=2) # Expected default assert ANSI_ESCAPE.sub('', child.buffer) == ' smtp.gmail.com' def verify_smtp_port_prompt(child): - child.expect('SMTP port for sending OSSEC alerts\:', timeout=2) + child.expect(r'SMTP port for sending OSSEC alerts\:', timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' 587' # Expected default def verify_sasl_domain_prompt(child): - child.expect('SASL domain for sending OSSEC alerts\:', timeout=2) + child.expect(r'SASL domain for sending OSSEC alerts\:', timeout=2) # Expected default assert ANSI_ESCAPE.sub('', child.buffer) == ' gmail.com' def verify_sasl_username_prompt(child): - child.expect('SASL username for sending OSSEC alerts\:', timeout=2) + child.expect(r'SASL username for sending OSSEC alerts\:', timeout=2) def verify_sasl_password_prompt(child): - child.expect('SASL password for sending OSSEC alerts\:', timeout=2) + child.expect(r'SASL password for sending OSSEC alerts\:', 
timeout=2) def verify_ssh_over_lan_prompt(child): - child.expect('will be available over LAN only\:', timeout=2) + child.expect(r'will be available over LAN only\:', timeout=2) assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default @@ -488,7 +488,7 @@ def set_reliable_keyserver(gpgdir): f.write('keyserver hkp://ipv4.pool.sks-keyservers.net') # Ensure correct permissions on .gnupg home directory. - os.chmod(gpgdir, 0700) + os.chmod(gpgdir, 0o700) @flaky(max_runs=3) diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst --- a/docs/development/testing_application_tests.rst +++ b/docs/development/testing_application_tests.rst @@ -110,7 +110,7 @@ tests are stored in the functional test directory:: ├── utils │ ├── db_helper.py │ ├── env.py - │ └── async.py + │ └── asynchronous.py ├── test_journalist.py ├── test_source.py │ ... diff --git a/molecule/builder-xenial/tests/test_securedrop_deb_package.py b/molecule/builder-xenial/tests/test_securedrop_deb_package.py --- a/molecule/builder-xenial/tests/test_securedrop_deb_package.py +++ b/molecule/builder-xenial/tests/test_securedrop_deb_package.py @@ -25,7 +25,7 @@ def extract_package_name_from_filepath(filepath): which can then be used for comparisons in dpkg output. """ deb_basename = os.path.basename(filepath) - package_name = re.search('^([a-z\-]+(?!\d))', deb_basename).groups()[0] + package_name = re.search(r'^([a-z\-]+(?!\d))', deb_basename).groups()[0] assert deb_basename.startswith(package_name) return package_name @@ -164,7 +164,7 @@ def test_deb_package_contains_no_config_file(host, deb): deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) c = host.run("dpkg-deb --contents {}".format(deb_package.path)) - assert not re.search("^.*/config\.py$", c.stdout, re.M) + assert not re.search(r"^.*/config\.py$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) @@ -192,7 +192,7 @@ def test_deb_package_contains_mo_file(host, deb): c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # Only relevant for the securedrop-app-code package: if "securedrop-app-code" in deb_package.path: - assert re.search("^.*messages\.mo$", c.stdout, re.M) + assert re.search(r"^.*messages\.mo$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) @@ -209,17 +209,17 @@ def test_deb_package_contains_no_generated_assets(host, deb): if "securedrop-app-code" in deb_package.path: c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # static/gen/ directory should exist - assert re.search("^.*\./var/www/securedrop" + assert re.search(r"^.*\./var/www/securedrop" "/static/gen/$", c.stdout, re.M) # static/gen/ directory should be empty - assert not re.search("^.*\./var/www/securedrop" + assert not re.search(r"^.*\./var/www/securedrop" "/static/gen/.+$", c.stdout, re.M) # static/.webassets-cache/ directory should exist - assert re.search("^.*\./var/www/securedrop" + assert re.search(r"^.*\./var/www/securedrop" "/static/.webassets-cache/$", c.stdout, re.M) # static/.webassets-cache/ directory should be empty - assert not re.search("^.*\./var/www/securedrop" + assert not re.search(r"^.*\./var/www/securedrop" "/static/.webassets-cache/.+$", c.stdout, re.M) # no SASS files should exist; only the generated CSS files. 
@@ -276,7 +276,7 @@ def test_deb_package_contains_css(host, deb): c = host.run("dpkg-deb --contents {}".format(deb_package.path)) for css_type in ['journalist', 'source']: - assert re.search("^.*\./var/www/securedrop/static/" + assert re.search(r"^.*\./var/www/securedrop/static/" "css/{}.css$".format(css_type), c.stdout, re.M) @@ -305,7 +305,7 @@ def test_deb_app_package_contains_https_validate_dir(host, deb): if "securedrop-app-code" in deb_package.path: c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # well-known/pki-validation directory should exist - assert re.search("^.*\./var/www/securedrop/" + assert re.search(r"^.*\./var/www/securedrop/" ".well-known/pki-validation/$", c.stdout, re.M) @@ -323,11 +323,11 @@ def test_grsec_metapackage(host, deb): if "securedrop-grsec" in deb_package.path: c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # Custom sysctl options should be present - assert re.search("^.*\./etc/sysctl.d/30-securedrop.conf$", + assert re.search(r"^.*\./etc/sysctl.d/30-securedrop.conf$", c.stdout, re.M) c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # Post-install kernel hook for managing PaX flags must exist. - assert re.search("^.*\./etc/kernel/postinst.d/paxctl-grub$", + assert re.search(r"^.*\./etc/kernel/postinst.d/paxctl-grub$", c.stdout, re.M) @@ -367,7 +367,7 @@ def test_control_helper_files_are_present(host, deb): ] c = host.run("dpkg-deb --info {}".format(deb_package.path)) for wanted_file in wanted_files: - assert re.search("^\s+?\d+ bytes,\s+\d+ lines[\s*]+"+wanted_file+"\s+.*$", + assert re.search(r"^\s+?\d+ bytes,\s+\d+ lines[\s*]+"+wanted_file+r"\s+.*$", c.stdout, re.M) @@ -383,7 +383,7 @@ def test_jinja_files_not_present(host, deb): c = host.run("dpkg-deb --contents {}".format(deb_package.path)) # There shouldn't be any files with a .j2 ending - assert not re.search("^.*\.j2$", c.stdout, re.M) + assert not re.search(r"^.*\.j2$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) diff --git a/molecule/testinfra/staging/app-code/test_haveged.py b/molecule/testinfra/staging/app-code/test_haveged.py --- a/molecule/testinfra/staging/app-code/test_haveged.py +++ b/molecule/testinfra/staging/app-code/test_haveged.py @@ -9,7 +9,7 @@ def test_haveged_config(host): assert f.is_file assert f.user == 'root' assert f.group == 'root' - assert oct(f.mode) == '0644' + assert f.mode == 0o644 assert f.contains('^DAEMON_ARGS="-w 2400"$') diff --git a/molecule/testinfra/staging/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py --- a/molecule/testinfra/staging/app-code/test_redis_worker.py +++ b/molecule/testinfra/staging/app-code/test_redis_worker.py @@ -40,6 +40,6 @@ def test_redis_worker_config_file(host): """ f = host.file('/etc/supervisor/conf.d/securedrop_worker.conf') assert f.is_file - assert oct(f.mode) == '0644' + assert f.mode == 0o644 assert f.user == "root" assert f.group == "root" diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -43,9 +43,9 @@ def test_securedrop_application_test_locale(host): with host.sudo(): assert securedrop_config.is_file assert securedrop_config.contains("^DEFAULT_LOCALE") - assert securedrop_config.content.count("DEFAULT_LOCALE") == 1 - assert securedrop_config.contains("^SUPPORTED_LOCALES = \[u'el', u'ar', 
'en_US'\]") - assert securedrop_config.content.count("SUPPORTED_LOCALES") == 1 + assert securedrop_config.content_string.count("DEFAULT_LOCALE") == 1 + assert securedrop_config.content_string.count("SUPPORTED_LOCALES") == 1 + assert "\nSUPPORTED_LOCALES = [u'el', u'ar', 'en_US']\n" in securedrop_config.content_string def test_securedrop_application_test_journalist_key(host): @@ -62,7 +62,7 @@ def test_securedrop_application_test_journalist_key(host): assert pubkey_file.is_file assert pubkey_file.user == "root" assert pubkey_file.group == "root" - assert oct(pubkey_file.mode) == "0644" + assert pubkey_file.mode == 0o644 # Let's make sure the corresponding fingerprint is specified # in the SecureDrop app configuration. @@ -74,7 +74,7 @@ def test_securedrop_application_test_journalist_key(host): securedrop_test_vars.securedrop_user assert securedrop_config.group == \ securedrop_test_vars.securedrop_user - assert oct(securedrop_config.mode) == "0600" + assert securedrop_config.mode == 0o600 assert securedrop_config.contains( "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$") @@ -91,4 +91,4 @@ def test_securedrop_application_sqlite_db(host): assert f.is_file assert f.user == securedrop_test_vars.securedrop_user assert f.group == securedrop_test_vars.securedrop_user - assert oct(f.mode) == "0640" + assert f.mode == 0o640 diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -30,9 +30,9 @@ def test_apache_headers_journalist_interface(host, header): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 header_regex = "^{}$".format(re.escape(header)) - assert re.search(header_regex, f.content, re.M) + assert re.search(header_regex, f.content_string, re.M) # Block of directory declarations for Apache vhost is common @@ -116,9 +116,9 @@ def test_apache_config_journalist_interface(host, apache_opt): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex = "^{}$".format(re.escape(apache_opt)) - assert re.search(regex, f.content, re.M) + assert re.search(regex, f.content_string, re.M) def test_apache_journalist_interface_vhost(host): @@ -127,7 +127,7 @@ def test_apache_journalist_interface_vhost(host): for serving Journalist Interface application code. 
""" f = host.file("/etc/apache2/sites-available/journalist.conf") - assert common_apache2_directory_declarations in f.content + assert common_apache2_directory_declarations in f.content_string def test_apache_logging_journalist_interface(host): diff --git a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py @@ -14,9 +14,9 @@ def test_apache_headers_source_interface(host, header): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 header_regex = "^{}$".format(re.escape(header)) - assert re.search(header_regex, f.content, re.M) + assert re.search(header_regex, f.content_string, re.M) @pytest.mark.parametrize("apache_opt", [ @@ -59,6 +59,6 @@ def test_apache_config_source_interface(host, apache_opt): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex = "^{}$".format(re.escape(apache_opt)) - assert re.search(regex, f.content, re.M) + assert re.search(regex, f.content_string, re.M) diff --git a/molecule/testinfra/staging/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/molecule/testinfra/staging/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -58,8 +58,8 @@ def test_apache_config_settings(host, apache_opt): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" - assert re.search("^{}$".format(re.escape(apache_opt)), f.content, re.M) + assert f.mode == 0o644 + assert re.search("^{}$".format(re.escape(apache_opt)), f.content_string, re.M) @pytest.mark.parametrize("port", [ @@ -78,7 +78,7 @@ def test_apache_ports_config(host, port): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 listening_regex = "^Listen {}:{}$".format(re.escape( securedrop_test_vars.apache_listening_address), port) diff --git a/molecule/testinfra/staging/app/test_app_network.py b/molecule/testinfra/staging/app/test_app_network.py --- a/molecule/testinfra/staging/app/test_app_network.py +++ b/molecule/testinfra/staging/app/test_app_network.py @@ -22,7 +22,7 @@ def test_app_iptables_rules(host): dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters - iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" + iptables = r"iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" environment = os.environ.get("CI_SD_ENV", "staging") iptables_file = "{}/iptables-app-{}.j2".format( os.path.dirname(os.path.abspath(__file__)), diff --git a/molecule/testinfra/staging/app/test_apparmor.py b/molecule/testinfra/staging/app/test_apparmor.py --- a/molecule/testinfra/staging/app/test_apparmor.py +++ b/molecule/testinfra/staging/app/test_apparmor.py @@ -28,15 +28,15 @@ def test_apparmor_enabled(host): @pytest.mark.parametrize('cap', apache2_capabilities) def test_apparmor_apache_capabilities(host, cap): """ check for exact list of expected app-armor capabilities for apache2 """ - c = host.run("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' " - "/etc/apparmor.d/usr.sbin.apache2") + c = host.run( + r"perl -nE '/^\s+capability\s+(\w+),$/ && say 
$1' /etc/apparmor.d/usr.sbin.apache2" + ) assert cap in c.stdout def test_apparmor_apache_exact_capabilities(host): """ ensure no extra capabilities are defined for apache2 """ - c = host.check_output("grep -ic capability " - "/etc/apparmor.d/usr.sbin.apache2") + c = host.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.apache2") assert str(len(apache2_capabilities)) == c @@ -46,8 +46,7 @@ def test_apparmor_apache_exact_capabilities(host): @pytest.mark.parametrize('cap', tor_capabilities) def test_apparmor_tor_capabilities(host, cap): """ check for exact list of expected app-armor capabilities for tor """ - c = host.run("perl -nE \'/^\s+capability\s+(\w+),$/ && " - "say $1\' /etc/apparmor.d/usr.sbin.tor") + c = host.run(r"perl -nE '/^\s+capability\s+(\w+),$/ && say $1' /etc/apparmor.d/usr.sbin.tor") assert cap in c.stdout diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py --- a/molecule/testinfra/staging/app/test_appenv.py +++ b/molecule/testinfra/staging/app/test_appenv.py @@ -16,11 +16,11 @@ def test_app_wsgi(host): f = host.file("/var/www/source.wsgi") with host.sudo(): assert f.is_file - assert oct(f.mode) == "0640" + assert f.mode == 0o640 assert f.user == 'www-data' assert f.group == 'www-data' assert f.contains("^import logging$") - assert f.contains("^logging\.basicConfig(stream=sys\.stderr)$") + assert f.contains(r"^logging\.basicConfig(stream=sys\.stderr)$") def test_pidfile(host): @@ -37,7 +37,7 @@ def test_app_directories(host, app_dir): assert f.is_directory assert f.user == sdvars.securedrop_user assert f.group == sdvars.securedrop_user - assert oct(f.mode) == "0700" + assert f.mode == 0o700 def test_app_code_pkg(host): @@ -57,7 +57,7 @@ def test_ensure_logo(host): """ ensure default logo header file exists """ f = host.file("{}/static/i/logo.png".format(sdvars.securedrop_code)) with host.sudo(): - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.user == sdvars.securedrop_user assert f.group == sdvars.securedrop_user @@ -77,4 +77,4 @@ def test_app_workerlog_dir(host): assert f.is_directory assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 diff --git a/molecule/testinfra/staging/app/test_ossec_agent.py b/molecule/testinfra/staging/app/test_ossec_agent.py --- a/molecule/testinfra/staging/app/test_ossec_agent.py +++ b/molecule/testinfra/staging/app/test_ossec_agent.py @@ -13,8 +13,8 @@ def test_hosts_files(host): mon_ip = os.environ.get('MON_IP', sdvars.mon_ip) mon_host = sdvars.monitor_hostname - assert f.contains('^127.0.0.1\s*localhost') - assert f.contains('^{}\s*{}\s*securedrop-monitor-server-alias$'.format( + assert f.contains(r'^127.0.0.1\s*localhost') + assert f.contains(r'^{}\s*{}\s*securedrop-monitor-server-alias$'.format( mon_ip, mon_host)) @@ -41,7 +41,7 @@ def test_ossec_keyfile_present(host): with host.sudo(): f = host.file("/var/ossec/etc/client.keys") assert f.exists - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.user == "root" assert f.group == "ossec" assert f.content_string diff --git a/molecule/testinfra/staging/app/test_paxctld.py b/molecule/testinfra/staging/app/test_paxctld.py --- a/molecule/testinfra/staging/app/test_paxctld.py +++ b/molecule/testinfra/staging/app/test_paxctld.py @@ -25,8 +25,8 @@ def test_paxctld_config(host): # Only relevant to Xenial installs if host.system_info.codename == "xenial": assert f.is_file - regex = "^/usr/sbin/apache2\s+m$" - assert re.search(regex, f.content, re.M) + regex 
= r"^/usr/sbin/apache2\s+m$" + assert re.search(regex, f.content_string, re.M) def test_paxctld_service(host): diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -43,7 +43,7 @@ def test_tor_torrc_options(host, torrc_option): f = host.file("/etc/tor/torrc") assert f.is_file assert f.user == "debian-tor" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains("^{}$".format(torrc_option)) diff --git a/molecule/testinfra/staging/app/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py --- a/molecule/testinfra/staging/app/test_tor_hidden_services.py +++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py @@ -14,7 +14,7 @@ def test_tor_service_directories(host, tor_service): with host.sudo(): f = host.file("/var/lib/tor/services/{}".format(tor_service['name'])) assert f.is_directory - assert oct(f.mode) == "0700" + assert f.mode == 0o700 assert f.user == "debian-tor" assert f.group == "debian-tor" @@ -28,18 +28,18 @@ def test_tor_service_hostnames(host, tor_service): """ # Declare regex only for THS; we'll build regex for ATHS only if # necessary, since we won't have the required values otherwise. - ths_hostname_regex = "[a-z0-9]{16}\.onion" + ths_hostname_regex = r"[a-z0-9]{16}\.onion" with host.sudo(): f = host.file("/var/lib/tor/services/{}/hostname".format( tor_service['name'])) assert f.is_file - assert oct(f.mode) == "0600" + assert f.mode == 0o600 assert f.user == "debian-tor" assert f.group == "debian-tor" # All hostnames should contain at *least* the hostname. - assert re.search(ths_hostname_regex, f.content) + assert re.search(ths_hostname_regex, f.content_string) if tor_service['authenticated']: # HidServAuth regex is approximately [a-zA-Z0-9/+], but validating @@ -47,9 +47,9 @@ def test_tor_service_hostnames(host, tor_service): # charset. 
aths_hostname_regex = ths_hostname_regex + " .{22} # client: " + \ tor_service['client'] - assert re.search("^{}$".format(aths_hostname_regex), f.content) + assert re.search("^{}$".format(aths_hostname_regex), f.content_string) else: - assert re.search("^{}$".format(ths_hostname_regex), f.content) + assert re.search("^{}$".format(ths_hostname_regex), f.content_string) @pytest.mark.parametrize('tor_service', sdvars.tor_services) diff --git a/molecule/testinfra/staging/common/test_cron_apt.py b/molecule/testinfra/staging/common/test_cron_apt.py --- a/molecule/testinfra/staging/common/test_cron_apt.py +++ b/molecule/testinfra/staging/common/test_cron_apt.py @@ -30,7 +30,7 @@ def test_cron_apt_config(host): f = host.file('/etc/cron-apt/config') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains('^SYSLOGON="always"$') assert f.contains('^EXITON=error$') @@ -54,7 +54,7 @@ def test_cron_apt_repo_list(host, repo): f = host.file('/etc/apt/security.list') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 repo_regex = '^{}$'.format(re.escape(repo_config)) assert f.contains(repo_regex) @@ -67,7 +67,7 @@ def test_cron_apt_repo_config_update(host): f = host.file('/etc/cron-apt/action.d/0-update') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 repo_config = str('update -o quiet=2' ' -o Dir::Etc::SourceList=/etc/apt/security.list' ' -o Dir::Etc::SourceParts=""') @@ -82,7 +82,7 @@ def test_cron_apt_delete_vanilla_kernels(host): f = host.file('/etc/cron-apt/action.d/9-remove') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 command = str('remove -y' ' linux-image-generic-lts-xenial linux-image-.*generic' ' -o quiet=2') @@ -96,7 +96,7 @@ def test_cron_apt_repo_config_upgrade(host): f = host.file('/etc/cron-apt/action.d/5-security') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains('^autoclean -y$') repo_config = str('dist-upgrade -y -o APT::Get::Show-Upgraded=true' ' -o Dir::Etc::SourceList=/etc/apt/security.list' @@ -130,7 +130,7 @@ def test_cron_apt_cron_jobs(host, cron_job): f = host.file('/etc/cron.d/cron-apt') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex_job = '^{}$'.format(re.escape(cron_job['job'])) if cron_job['state'] == 'present': diff --git a/molecule/testinfra/staging/common/test_fpf_apt_repo.py b/molecule/testinfra/staging/common/test_fpf_apt_repo.py --- a/molecule/testinfra/staging/common/test_fpf_apt_repo.py +++ b/molecule/testinfra/staging/common/test_fpf_apt_repo.py @@ -26,7 +26,7 @@ def test_fpf_apt_repo_present(host): f = host.file('/etc/apt/sources.list.d/apt_test_freedom_press.list') else: f = host.file('/etc/apt/sources.list.d/apt_freedom_press.list') - repo_regex = '^deb \[arch=amd64\] {} {} main$'.format( + repo_regex = r'^deb \[arch=amd64\] {} {} main$'.format( re.escape(test_vars.fpf_apt_repo_url), re.escape(host.system_info.codename)) assert f.contains(repo_regex) diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -12,7 +12,7 @@ def test_ssh_motd_disabled(host): """ f = host.file("/etc/pam.d/sshd") assert f.is_file - assert not f.contains("pam\.motd") + assert not 
f.contains(r"pam\.motd") @pytest.mark.parametrize("package", [ @@ -61,7 +61,7 @@ def test_grsecurity_lock_file(host): `grsec_lock` file, which is automatically created by grsecurity. """ f = host.file("/proc/sys/kernel/grsecurity/grsec_lock") - assert oct(f.mode) == "0600" + assert f.mode == 0o600 assert f.user == "root" assert f.size == 0 @@ -119,7 +119,7 @@ def test_grsecurity_paxtest(host, paxtest_check): c = host.run("paxtest blackhat") assert c.rc == 0 assert "Vulnerable" not in c.stdout - regex = "^{}\s*:\sKilled$".format(re.escape(paxtest_check)) + regex = r"^{}\s*:\sKilled$".format(re.escape(paxtest_check)) assert re.search(regex, c.stdout) diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py --- a/molecule/testinfra/staging/common/test_system_hardening.py +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -41,8 +41,8 @@ def test_dns_setting(host): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" - assert f.contains('^nameserver 8\.8\.8\.8$') + assert f.mode == 0o644 + assert f.contains(r'^nameserver 8\.8\.8\.8$') @pytest.mark.parametrize('kernel_module', [ diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -11,24 +11,24 @@ def test_sudoers_config(host): assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0440" + assert f.mode == 0o440 # Restrictive file mode requires sudo for reading, so let's # read once and store the content in a var. with host.sudo(): - sudoers_config = f.content + sudoers_config = f.content_string # Using re.search rather than `f.contains` since the basic grep # matching doesn't support PCRE, so `\s` won't work. - assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M) - assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M) - assert re.search('^Defaults\s+mail_badpass$', sudoers_config, re.M) - assert re.search('Defaults\s+secure_path="/usr/local/sbin:' - '/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', + assert re.search(r'^Defaults\s+env_reset$', sudoers_config, re.M) + assert re.search(r'^Defaults\s+env_reset$', sudoers_config, re.M) + assert re.search(r'^Defaults\s+mail_badpass$', sudoers_config, re.M) + assert re.search(r'Defaults\s+secure_path="/usr/local/sbin:' + r'/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', sudoers_config, re.M) - assert re.search('^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$', + assert re.search(r'^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$', sudoers_config, re.M) - assert re.search('Defaults:%sudo\s+!requiretty', sudoers_config, re.M) + assert re.search(r'Defaults:%sudo\s+!requiretty', sudoers_config, re.M) def test_sudoers_tmux_env(host): @@ -91,4 +91,4 @@ def test_sudoers_tmux_env_deprecated(host): admin_user = "vagrant" f = host.file("/home/{}/.bashrc".format(admin_user)) - assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$") + assert not f.contains(r"^. 
\/etc\/bashrc\.securedrop_additions$") diff --git a/molecule/testinfra/staging/mon/test_mon_network.py b/molecule/testinfra/staging/mon/test_mon_network.py --- a/molecule/testinfra/staging/mon/test_mon_network.py +++ b/molecule/testinfra/staging/mon/test_mon_network.py @@ -22,7 +22,7 @@ def test_mon_iptables_rules(host): dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters - iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" + iptables = r"iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" environment = os.environ.get("CI_SD_ENV", "staging") iptables_file = "{}/iptables-mon-{}.j2".format( os.path.dirname(os.path.abspath(__file__)), diff --git a/molecule/testinfra/staging/mon/test_ossec_server.py b/molecule/testinfra/staging/mon/test_ossec_server.py --- a/molecule/testinfra/staging/mon/test_ossec_server.py +++ b/molecule/testinfra/staging/mon/test_ossec_server.py @@ -39,7 +39,7 @@ def test_ossec_keyfiles(host, keyfile): assert f.is_file # The postinst scripts in the OSSEC deb packages set 440 on the # keyfiles; the Ansible config should be updated to do the same. - assert oct(f.mode) == "0440" + assert f.mode == 0o440 assert f.user == "root" assert f.group == "ossec" @@ -56,7 +56,7 @@ def test_procmail_log(host): assert f.is_file assert f.user == "ossec" assert f.group == "root" - assert oct(f.mode) == "0660" + assert f.mode == 0o660 def test_ossec_authd(host): @@ -75,7 +75,7 @@ def test_hosts_files(host): app_host = securedrop_test_vars.app_hostname assert f.contains('^127.0.0.1.*localhost') - assert f.contains('^{}\s*{}$'.format(app_ip, app_host)) + assert f.contains(r'^{}\s*{}$'.format(app_ip, app_host)) def test_ossec_log_contains_no_malformed_events(host): diff --git a/molecule/testinfra/staging/mon/test_postfix.py b/molecule/testinfra/staging/mon/test_postfix.py --- a/molecule/testinfra/staging/mon/test_postfix.py +++ b/molecule/testinfra/staging/mon/test_postfix.py @@ -21,9 +21,9 @@ def test_postfix_headers(host, header): """ f = host.file("/etc/postfix/header_checks") assert f.is_file - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex = '^{}$'.format(re.escape(header)) - assert re.search(regex, f.content, re.M) + assert re.search(regex, f.content_string, re.M) def test_postfix_generic_maps(host): diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -12,10 +12,7 @@ import signal import subprocess -try: - import configparser -except ImportError: - from six.moves import configparser # renamed in Python 3 +from six.moves import configparser from flask import url_for from pyotp import TOTP diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -90,7 +90,7 @@ def test_submit_message(source_app, journalist_app, test_journo): submission_url = soup.select('ul#submissions li a')[0]['href'] assert "-msg" in submission_url span = soup.select('ul#submissions li span.info span')[0] - assert re.compile('\d+ bytes').match(span['title']) + assert re.compile(r'\d+ bytes').match(span['title']) resp = app.get(submission_url) assert resp.status_code == 200 @@ -143,7 +143,7 @@ def assertion(): assert not ( os.path.exists(current_app.storage.path(filesystem_id, doc_name))) - utils.async.wait_for_assertion(assertion) + utils.asynchronous.wait_for_assertion(assertion) def test_submit_file(source_app, 
journalist_app, test_journo): @@ -186,7 +186,7 @@ def test_submit_file(source_app, journalist_app, test_journo): submission_url = soup.select('ul#submissions li a')[0]['href'] assert "-doc" in submission_url span = soup.select('ul#submissions li span.info span')[0] - assert re.compile('\d+ bytes').match(span['title']) + assert re.compile(r'\d+ bytes').match(span['title']) resp = app.get(submission_url) assert resp.status_code == 200 @@ -246,7 +246,7 @@ def assertion(): assert not ( os.path.exists(current_app.storage.path(filesystem_id, doc_name))) - utils.async.wait_for_assertion(assertion) + utils.asynchronous.wait_for_assertion(assertion) def _helper_test_reply(journalist_app, source_app, config, test_journo, @@ -303,7 +303,7 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, # Block up to 15s for the reply keypair, so we can test sending a reply def assertion(): assert current_app.crypto_util.getkey(filesystem_id) is not None - utils.async.wait_for_assertion(assertion, 15) + utils.asynchronous.wait_for_assertion(assertion, 15) # Create 2 replies to test deleting on journalist and source interface with journalist_app.test_client() as app: @@ -411,7 +411,7 @@ def assertion(): assert not any([os.path.exists(current_app.storage.path(filesystem_id, doc_name)) for doc_name in checkbox_values]) - utils.async.wait_for_assertion(assertion) + utils.asynchronous.wait_for_assertion(assertion) def _can_decrypt_with_key(journalist_app, msg, passphrase=None): @@ -511,7 +511,7 @@ def test_delete_collection(mocker, source_app, journalist_app, test_journo): def assertion(): assert not os.path.exists(current_app.storage.path(filesystem_id)) - utils.async.wait_for_assertion(assertion) + utils.asynchronous.wait_for_assertion(assertion) def test_delete_collections(mocker, journalist_app, source_app, test_journo): @@ -554,7 +554,7 @@ def assertion(): any([os.path.exists(current_app.storage.path(filesystem_id)) for filesystem_id in checkbox_values])) - utils.async.wait_for_assertion(assertion) + utils.asynchronous.wait_for_assertion(assertion) def _helper_filenames_submit(app): diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -31,7 +31,7 @@ from .utils.instrument import InstrumentedApp # Smugly seed the RNG for deterministic testing -random.seed('¯\_(ツ)_/¯') +random.seed(r'¯\_(ツ)_/¯') VALID_PASSWORD = 'correct horse battery staple generic passphrase hooray' VALID_PASSWORD_2 = 'another correct horse battery staple generic passphrase' @@ -1668,7 +1668,7 @@ def test_delete_source_deletes_docs_on_disk(journalist_app, test_source['filesystem_id']) # Wait up to 5s to wait for Redis worker `srm` operation to complete - utils.async.wait_for_redis_worker(job) + utils.asynchronous.wait_for_redis_worker(job) # Encrypted documents no longer exist assert not os.path.exists(dir_source_docs) diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py --- a/securedrop/tests/test_store.py +++ b/securedrop/tests/test_store.py @@ -243,7 +243,7 @@ def test_async_add_checksum_for_file(config, db_model): job = async_add_checksum_for_file(db_obj) - utils.async.wait_for_redis_worker(job, timeout=5) + utils.asynchronous.wait_for_redis_worker(job, timeout=5) with app.app_context(): # requery to get a new object diff --git a/securedrop/tests/utils/__init__.py b/securedrop/tests/utils/__init__.py --- a/securedrop/tests/utils/__init__.py +++ 
b/securedrop/tests/utils/__init__.py @@ -3,7 +3,7 @@ from flask import g from pyotp import TOTP -from . import async # noqa +from . import asynchronous # noqa from . import db_helper # noqa from . import env # noqa diff --git a/securedrop/tests/utils/api_helper.py b/securedrop/tests/utils/api_helper.py --- a/securedrop/tests/utils/api_helper.py +++ b/securedrop/tests/utils/api_helper.py @@ -1,11 +1,8 @@ -def get_api_headers(token=''): - if token: - return { - 'Authorization': 'Token {}'.format(token), - 'Accept': 'application/json', - 'Content-Type': 'application/json' - } +def get_api_headers(token=""): + if token: return { - 'Accept': 'application/json', - 'Content-Type': 'application/json' + "Authorization": "Token {}".format(token), + "Accept": "application/json", + "Content-Type": "application/json", } + return {"Accept": "application/json", "Content-Type": "application/json"} diff --git a/securedrop/tests/utils/async.py b/securedrop/tests/utils/asynchronous.py similarity index 100% rename from securedrop/tests/utils/async.py rename to securedrop/tests/utils/asynchronous.py diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py --- a/securedrop/tests/utils/env.py +++ b/securedrop/tests/utils/env.py @@ -89,5 +89,5 @@ def teardown(): # safeguard for #844 assert not os.path.exists(config.SECUREDROP_DATA_ROOT) except OSError as exc: - if 'No such file or directory' not in exc: + if 'No such file or directory' != exc.strerror: raise diff --git a/securedrop/tests/utils/instrument.py b/securedrop/tests/utils/instrument.py --- a/securedrop/tests/utils/instrument.py +++ b/securedrop/tests/utils/instrument.py @@ -9,11 +9,7 @@ """ -try: - from urllib.parse import urlparse, urljoin -except ImportError: - # Python 2 urlparse fallback - from urlparse import urlparse, urljoin +from six.moves.urllib.parse import urlparse, urljoin import pytest
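The recurring pattern in the patch above — replacing `oct(f.mode) == "0644"` with `f.mode == 0o644`, and plain strings containing `\d`/`\s` with raw strings — is a Python 2 to Python 3 compatibility fix. A minimal illustrative sketch (not from the repo) of why the old mode assertions break on Python 3:

```python
import re

# Illustrative only: testinfra's File.mode returns a plain integer.
mode = 0o644

# Python 2 renders oct(420) as '0644', Python 3 as '0o644', so the old
# string comparison oct(f.mode) == "0644" silently fails on Python 3.
assert oct(mode) in ("0644", "0o644")

# Comparing integers against an octal literal works identically on both:
assert mode == 0o644

# Raw strings avoid Python 3's deprecation of unrecognized escape
# sequences like "\d" inside regular-expression patterns:
assert re.search(r"^\d+ bytes$", "123 bytes")
```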
Keep two different versions of requirements.txt file for Python2 and Python3
## Description

`pip-compile` creates different output for `requirements.txt` under Python2 than under Python3, which in turn creates problems when installing those dependencies under the other version.

## Probable solution

Have two different requirements files for Python2 and Python3, for both dev and tests.
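A hypothetical sketch of the proposed fix (the file names and layout are assumptions, not taken from the repo): since `pip-compile` resolves dependencies against the interpreter it runs under, one compiled lockfile would be generated per interpreter, roughly:

```python
# Hypothetical helper (names assumed): compile one lockfile per
# interpreter, since pip-compile's resolution depends on the Python
# version it runs under. Assumes pip-tools is installed for both
# interpreters; `python -m piptools compile` invokes pip-compile
# under the chosen interpreter.
import subprocess

TARGETS = [
    ("python2", "requirements-py2.txt"),  # assumed output name
    ("python3", "requirements.txt"),
]

for interpreter, outfile in TARGETS:
    subprocess.check_call([
        interpreter, "-m", "piptools", "compile",
        "--output-file", outfile,
        "requirements.in",
    ])
```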
2019-06-20T00:54:36Z
[]
[]
freedomofpress/securedrop
4,622
freedomofpress__securedrop-4622
[ "4355" ]
5eaad3b04074729a04350471e2ed94205e8e3c68
diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py --- a/securedrop/i18n_tool.py +++ b/securedrop/i18n_tool.py @@ -4,7 +4,6 @@ from __future__ import print_function import argparse import io -import six import logging import os import glob @@ -61,7 +60,7 @@ def file_is_modified(self, path): def ensure_i18n_remote(self, args): k = {'_cwd': args.root} - if six.b('i18n') not in git.remote(**k).stdout: + if b'i18n' not in git.remote(**k).stdout: git.remote.add('i18n', args.url, **k) git.fetch('i18n', **k) diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -7,6 +7,7 @@ from flask_babel import gettext from flask_wtf.csrf import CSRFProtect, CSRFError from os import path +import sys from werkzeug.exceptions import default_exceptions import i18n @@ -141,6 +142,17 @@ def setup_g(): 'SESSION_EXPIRATION_MINUTES', 120)) + # Work around https://github.com/lepture/flask-wtf/issues/275 + # -- after upgrading from Python 2 to Python 3, any existing + # session's csrf_token value will be retrieved as bytes, + # causing a TypeError. This simple fix, deleting the existing + # token, was suggested in the issue comments. This code will + # be safe to remove after Python 2 reaches EOL in 2020, and no + # supported SecureDrop installations can still have this + # problem. + if sys.version_info.major > 2 and type(session.get('csrf_token')) is bytes: + del session['csrf_token'] + uid = session.get('uid', None) if uid: g.user = Journalist.query.get(uid) diff --git a/setup.py b/setup.py new file mode 100644 --- /dev/null +++ b/setup.py @@ -0,0 +1,24 @@ +import setuptools + +long_description = "The SecureDrop whistleblower platform." + +setuptools.setup( + name="securedrop-app-code", + version="1.0.0~rc1", + author="Freedom of the Press Foundation", + author_email="[email protected]", + description="SecureDrop Server", + long_description=long_description, + long_description_content_type="text/markdown", + license="GPLv3+", + python_requires=">=3.5", + url="https://github.com/freedomofpress/securedrop", + classifiers=( + "Development Status :: 5 - Stable", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + ), +)
diff --git a/molecule/builder-xenial/tests/test_build_dependencies.py b/molecule/builder-xenial/tests/test_build_dependencies.py --- a/molecule/builder-xenial/tests/test_build_dependencies.py +++ b/molecule/builder-xenial/tests/test_build_dependencies.py @@ -8,16 +8,6 @@ ] -def test_pip_wheel_installed(host): - """ - Ensure `wheel` is installed via pip, for packaging Python - dependencies into a Debian package. - """ - c = host.run("pip list installed") - assert "wheel" in c.stdout - assert c.rc == 0 - - def test_sass_gem_installed(host): """ Ensure the `sass` Ruby gem is installed, for compiling SASS to CSS. @@ -31,7 +21,7 @@ def test_pip_dependencies_installed(host): """ Ensure the development pip dependencies are installed """ - c = host.run("pip list installed") + c = host.run("pip3 list installed") assert "Flask-Babel" in c.stdout assert c.rc == 0 diff --git a/molecule/builder-xenial/tests/test_securedrop_deb_package.py b/molecule/builder-xenial/tests/test_securedrop_deb_package.py --- a/molecule/builder-xenial/tests/test_securedrop_deb_package.py +++ b/molecule/builder-xenial/tests/test_securedrop_deb_package.py @@ -164,7 +164,7 @@ def test_deb_package_contains_no_config_file(host, deb): deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) c = host.run("dpkg-deb --contents {}".format(deb_package.path)) - assert not re.search(r"^.*/config\.py$", c.stdout, re.M) + assert not re.search(r"^ ./var/www/securedrop/config.py$", c.stdout, re.M) @pytest.mark.parametrize("deb", deb_packages) @@ -198,9 +198,9 @@ def test_deb_package_contains_mo_file(host, deb): @pytest.mark.parametrize("deb", deb_packages) def test_deb_package_contains_no_generated_assets(host, deb): """ - Ensures the `securedrop-app-code` package does not ship a minified - static assets, which are built automatically via Flask-Assets, and may be - present in the source directory used to build from. + Ensures the `securedrop-app-code` package does not ship minified + static assets, which are built automatically via Flask-Assets, and + may be present in the source directory used to build from. """ deb_package = host.file(deb.format( securedrop_test_vars.securedrop_version)) @@ -223,7 +223,7 @@ def test_deb_package_contains_no_generated_assets(host, deb): "/static/.webassets-cache/.+$", c.stdout, re.M) # no SASS files should exist; only the generated CSS files. - assert not re.search("^.*sass.*$", c.stdout, re.M) + assert not re.search("^.*sass$", c.stdout, re.M) # no .map files should exist; only the generated CSS files. 
assert not re.search("^.*css.map$", c.stdout, re.M) diff --git a/molecule/testinfra/staging/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py --- a/molecule/testinfra/staging/app-code/test_redis_worker.py +++ b/molecule/testinfra/staging/app-code/test_redis_worker.py @@ -8,7 +8,7 @@ @pytest.mark.parametrize('config_line', [ '[program:securedrop_worker]', - 'command=/usr/local/bin/rqworker', + 'command=/opt/venvs/securedrop-app-code/bin/rqworker', "directory={}".format(securedrop_test_vars.securedrop_code), 'autostart=true', 'autorestart=true', @@ -16,7 +16,6 @@ 'stderr_logfile=/var/log/securedrop_worker/err.log', 'stdout_logfile=/var/log/securedrop_worker/out.log', "user={}".format(securedrop_test_vars.securedrop_user), - 'environment=HOME="/tmp/python-gnupg"', ]) def test_redis_worker_configuration(host, config_line): """ diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -45,7 +45,7 @@ def test_securedrop_application_test_locale(host): assert securedrop_config.contains("^DEFAULT_LOCALE") assert securedrop_config.content_string.count("DEFAULT_LOCALE") == 1 assert securedrop_config.content_string.count("SUPPORTED_LOCALES") == 1 - assert "\nSUPPORTED_LOCALES = [u'el', u'ar', 'en_US']\n" in securedrop_config.content_string + assert "\nSUPPORTED_LOCALES = ['el', 'ar', 'en_US']\n" in securedrop_config.content_string def test_securedrop_application_test_journalist_key(host): diff --git a/molecule/testinfra/staging/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/molecule/testinfra/staging/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -6,7 +6,6 @@ @pytest.mark.parametrize("package", [ - "libapache2-mod-wsgi", "libapache2-mod-xsendfile", ]) def test_apache_apt_packages(host, package): diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py --- a/molecule/testinfra/staging/app/test_appenv.py +++ b/molecule/testinfra/staging/app/test_appenv.py @@ -1,13 +1,16 @@ +import os.path import pytest testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars +sdbin = "/opt/venvs/securedrop-app-code/bin" + @pytest.mark.parametrize('exp_pip_pkg', sdvars.pip_deps) def test_app_pip_deps(host, exp_pip_pkg): """ Ensure pip dependencies are installed """ - pip = host.pip_package.get_packages() + pip = host.pip_package.get_packages(pip_path=os.path.join(sdbin, "pip")) assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version'] @@ -77,4 +80,4 @@ def test_app_workerlog_dir(host): assert f.is_directory assert f.user == "root" assert f.group == "root" - assert f.mode == 0o644 + assert f.mode == 0o700
Create securedrop-app-code Debian package with Python 3 dependencies Now that we have basic Python 3 compatibility (#997, resolved through #4239), we need to ensure that all the `securedrop-app-code` dependencies transition to Python 3 as well. The plan of record is to use `dh-virtualenv` to ship these dependencies in the package itself, instead of depending on the system Python libraries. This will allow us to use consistent dependency management processes for SecureDrop Workstation and SecureDrop core, and will get us closer to resolving #3407.
For the 4/17-5/1 sprint, Kushal will prepare at least a draft PR for broader team input, which will not use FPF's PyPI mirror yet.
hey @kushaldas what is the status of this?
can you push up whatever wip you have when you get a chance @kushaldas ?
I did not manage to work on this enough during that sprint, but I have other WIP work on my backup system (a `make clean` removed it from my laptop during the last trip). I will push it here with details of things that need to be done.
please push up whatever wip you have locally so @rmol can check it out since y'all will be working together next sprint to get the combined `mod_wsgi` changes and this issue merged together.
2019-07-17T18:39:28Z
[]
[]
freedomofpress/securedrop
4,644
freedomofpress__securedrop-4644
[ "4644" ]
20cbd52cb40ce1cef09407f81fc338bf6765aaf9
diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py --- a/install_files/ansible-base/roles/backup/files/0.3_collect.py +++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py @@ -39,7 +39,7 @@ def collect_custom_header_image(zf): def collect_tor_files(zf): - # All of the tor hidden service private keys are stored in the THS specific + # All of the tor Onion Service private keys are stored in the THS specific # subdirectory `/var/lib/tor/services` backing up this directory will back # up all of the THS and ATHS required keys needed to restore all the hidden # services on that system.
diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -38,7 +38,7 @@ def test_tor_torrc_options(host, torrc_option): These options should be present regardless of machine role, meaning both Application and Monitor server will have them. - Separate tests will check for specific hidden services. + Separate tests will check for specific Onion Services. """ f = host.file("/etc/tor/torrc") assert f.is_file diff --git a/molecule/testinfra/staging/app/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py --- a/molecule/testinfra/staging/app/test_tor_hidden_services.py +++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py @@ -22,9 +22,9 @@ def test_tor_service_directories(host, tor_service): @pytest.mark.parametrize('tor_service', sdvars.tor_services) def test_tor_service_hostnames(host, tor_service): """ - Check contents of tor service hostname file. For normal Hidden Services, + Check contents of tor service hostname file. For normal Onion Services, the file should contain only hostname (.onion URL). For Authenticated - Hidden Services, it should also contain the HidServAuth cookie. + Onion Services, it should also contain the HidServAuth cookie. """ # Declare regex only for THS; we'll build regex for ATHS only if # necessary, since we won't have the required values otherwise. @@ -56,12 +56,12 @@ def test_tor_service_hostnames(host, tor_service): def test_tor_services_config(host, tor_service): """ Ensure torrc file contains relevant lines for Hidden Service declarations. - All hidden services must include: + All Onion Services must include: * HiddenServiceDir * HiddenServicePort - Only authenticated hidden services must also include: + Only authenticated Onion Services must also include: * HiddenServiceAuthorizeClient
replace "hidden service" occurrences ## Status ready for review ## Description of Changes Changes Proposed: - no longer refer to [Onion Services](https://2019.www.torproject.org/docs/onion-services.html.en) as hidden services; - there are NO new images I added, it's just text; - all changed content here is either just a comment (playbook, or shell script); - changelog was kept as is. ## Testing I followed the _(slightly outdated)_ [Documentation Guidelines](https://docs.securedrop.org/en/latest/development/documentation_guidelines.html), and all looked fine: ``` # make docs ``` Gave me the following: ``` ... | copying static files... done | copying extra files... done | dumping search index in English (code: en) ... done | dumping object inventory... done | build succeeded. +-------------------------------------------------------------------------------- [I 190725 16:16:16 server:296] Serving on http://127.0.0.1:8000 [I 190725 16:16:16 handlers:62] Start watching changes [I 190725 16:16:16 handlers:64] Start detecting changes ``` `make docs-linkcheck` returned an error, but that's not related to the changes made here. `docs-lint` ran just fine. ## Deployment Any special considerations for deployment? - AFAIK, no. ## Checklist ### If you made changes to the server application code: - [ ] Linting (`make lint`) and tests (`make -C securedrop test`) pass in the development container ### If you made changes to `securedrop-admin`: - [ ] Linting and tests (`make -C admin test`) pass in the admin development container ### If you made changes to the system configuration: - [ ] [Configuration tests](https://docs.securedrop.org/en/latest/development/testing_configuration_tests.html) pass ### If you made non-trivial code changes: - [ ] I have written a test plan and validated it for this PR ### If you made changes to documentation: - [x] Doc linting (`make docs-lint`) passed locally
2019-07-25T14:26:58Z
[]
[]
freedomofpress/securedrop
4,648
freedomofpress__securedrop-4648
[ "4627" ]
37347bb8b0c8bcb863054d895c4cd64ca1cc6975
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -409,11 +409,33 @@ def load_and_update_config(self): def update_config(self): self.config.update(self.user_prompt_config()) + self.update_onion_version_config() self.save() self.validate_gpg_keys() self.validate_journalist_alert_email() return True + def update_onion_version_config(self): + """ + This method updates onion service related configurations. + """ + v2 = False + v3 = True + source_ths = os.path.join(self.args.ansible_path, "app-source-ths") + if os.path.exists(source_ths): # Means old installation + data = "" + with open(source_ths) as fobj: + data = fobj.read() + + data = data.strip() + if len(data) < 56: # Old v2 onion address + v2 = True + + # Now update the configuration + config = {"v2_onion_services": v2, + "v3_onion_services": v3} + self.config.update(config) + def user_prompt_config(self): config = {} for desc in self.desc:
diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -44,6 +44,8 @@ smtp_relay: smtp.gmail.com smtp_relay_port: 587 ssh_users: sd +v2_onion_services: false +v3_onion_services: true ''' JOURNALIST_ALERT_OUTPUT = '''app_hostname: app @@ -74,6 +76,8 @@ smtp_relay: smtp.gmail.com smtp_relay_port: 587 ssh_users: sd +v2_onion_services: false +v3_onion_services: true ''' HTTPS_OUTPUT = '''app_hostname: app @@ -104,6 +108,8 @@ smtp_relay: smtp.gmail.com smtp_relay_port: 587 ssh_users: sd +v2_onion_services: false +v3_onion_services: true ''' diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -18,6 +18,7 @@ # import io +import os import argparse from flaky import flaky from os.path import dirname, join, basename, exists @@ -617,6 +618,66 @@ def test_save(self, tmpdir): """) assert expected == io.open(site_config_path).read() + def test_old_v2_onion_services(self, tmpdir): + "Checks for exitsing v2 source address" + site_config_path = join(str(tmpdir), 'site_config') + args = argparse.Namespace(site_config=site_config_path, + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with open("app-source-ths", "w") as fobj: + fobj.write("aaaaaaaaaaaaaaaa.onion\n") + site_config.update_onion_version_config() + site_config.save() + data = "" + with open(site_config_path) as fobj: + data = fobj.read() + expected = textwrap.dedent("""\ + v2_onion_services: true + v3_onion_services: true + """) + os.remove("app-source-ths") + assert expected == data + + def test_no_v2_onion_services(self, tmpdir): + "Checks for new installation for only v3" + site_config_path = join(str(tmpdir), 'site_config') + args = argparse.Namespace(site_config=site_config_path, + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.update_onion_version_config() + site_config.save() + data = "" + with open(site_config_path) as fobj: + data = fobj.read() + expected = textwrap.dedent("""\ + v2_onion_services: false + v3_onion_services: true + """) + assert expected == data + + def test_only_v3_onion_services(self, tmpdir): + "Checks for new installation for only v3 ths file" + site_config_path = join(str(tmpdir), 'site_config') + args = argparse.Namespace(site_config=site_config_path, + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with open("app-source-ths", "w") as fobj: + fobj.write("a" * 56 + ".onion\n") + site_config.update_onion_version_config() + site_config.save() + data = "" + with open(site_config_path) as fobj: + data = fobj.read() + expected = textwrap.dedent("""\ + v2_onion_services: false + v3_onion_services: true + """) + os.remove("app-source-ths") + assert expected == data + def test_validate_gpg_key(self, caplog): args = argparse.Namespace(site_config='INVALID', ansible_path='tests/files',
[v3 onion migration] securedrop-admin sdconfig should set v2_onion_services and v3_onion_services variables ## Description We should enable the setting of two new boolean configuration variables in `site-specific` by `securedrop-admin sdconfig`: `v2_onion_services` and `v3_onion_services`. The expected behavior here is: * The default value of the `v2_onion_services` variable is set to: - `True` if there is a ths file (`app-ths-source`) files with a 16 char onion address in `install_files/ansible-base/` - this is to prevent admins from accidentally disabling v2 onion services - `False` otherwise - we don't want new installs to be using v2 services beginning in SecureDrop 1.0.0 * The default value of the `v3_onion_services` variable is set to `True`: - we want new SecureDrop instances to _only_ use v3 services beginning in SecureDrop 1.0.0 and we want _existing_ installs to enable v3 alongside v2 so they can begin transitioning users to v3. Note that we may need to do another iteration to improve the messaging to admins / point to some overview docs (to be written) in `docs.securedrop.org` explaining the migration process. This ticket is primarily just for the functionality of setting the proper defaults (the docs and messaging in sdconfig should also communicate that users using HTTPS on the source interface will need to get a new certificate including the v3 interface before advertising source users to use the v3 onion service). Subticket of #2951
2019-07-26T12:32:47Z
[]
[]
freedomofpress/securedrop
4,652
freedomofpress__securedrop-4652
[ "4667" ]
a9087a2d42f2cb6a645adbb6d09f204739c4a5eb
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -33,10 +33,14 @@ import subprocess import sys import types +import json +import base64 import prompt_toolkit from prompt_toolkit.validation import Validator, ValidationError import yaml from pkg_resources import parse_version +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import x25519 sdlog = logging.getLogger(__name__) RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77' @@ -566,6 +570,73 @@ def sdconfig(args): return 0 +def generate_new_v3_keys(): + """This function generate new keys for Tor v3 onion + services and returns them as as tuple. + + :returns: Tuple(public_key, private_key) + """ + + private_key = x25519.X25519PrivateKey.generate() + private_bytes = private_key.private_bytes( + encoding=serialization.Encoding.Raw , + format=serialization.PrivateFormat.Raw, + encryption_algorithm=serialization.NoEncryption()) + public_key = private_key.public_key() + public_bytes = public_key.public_bytes( + encoding=serialization.Encoding.Raw, + format=serialization.PublicFormat.Raw) + + # Base32 encode and remove base32 padding characters (`=`) + # Using try/except blocks for Python 2/3 support. + try: + public = base64.b32encode(public_bytes).replace('=', '') \ + .decode("utf-8") + except TypeError: + public = base64.b32encode(public_bytes).replace(b'=', b'') \ + .decode("utf-8") + try: + private = base64.b32encode(private_bytes).replace('=', '') \ + .decode("utf-8") + except TypeError: + private = base64.b32encode(private_bytes).replace(b'=', b'') \ + .decode("utf-8") + return public, private + + +def find_or_generate_new_torv3_keys(args): + """ + This method will either read v3 Tor onion service keys if found or generate + a new public/private keypair. 
+ """ + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + if os.path.exists(secret_key_path): + print('Tor v3 onion service keys already exist in: {}'.format( + secret_key_path)) + return 0 + # No old keys, generate and store them first + app_journalist_public_key, \ + app_journalist_private_key = generate_new_v3_keys() + # For app ssh service + app_ssh_public_key, app_ssh_private_key = generate_new_v3_keys() + # For mon ssh service + mon_ssh_public_key, mon_ssh_private_key = generate_new_v3_keys() + tor_v3_service_info = { + "app_journalist_public_key": app_journalist_public_key, + "app_journalist_private_key": app_journalist_private_key, + "app_ssh_public_key": app_ssh_public_key, + "app_ssh_private_key": app_ssh_private_key, + "mon_ssh_public_key": mon_ssh_public_key, + "mon_ssh_private_key": mon_ssh_private_key, + } + with open(secret_key_path, 'w') as fobj: + json.dump(tor_v3_service_info, fobj, indent=4) + print('Tor v3 onion service keys generated and stored in: {}'.format( + secret_key_path)) + return 0 + + def install_securedrop(args): """Install/Update SecureDrop""" SiteConfig(args).load() @@ -827,6 +898,11 @@ class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter, help=run_tails_config.__doc__) parse_tailsconfig.set_defaults(func=run_tails_config) + parse_generate_tor_keys = subparsers.add_parser( + 'generate_v3_keys', + help=find_or_generate_new_torv3_keys.__doc__) + parse_generate_tor_keys.set_defaults(func=find_or_generate_new_torv3_keys) + parse_backup = subparsers.add_parser('backup', help=backup_securedrop.__doc__) parse_backup.set_defaults(func=backup_securedrop)
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -22,6 +22,7 @@ import argparse from flaky import flaky from os.path import dirname, join, basename, exists +import json import mock from prompt_toolkit.validation import ValidationError import pytest @@ -1008,3 +1009,58 @@ def test_load(self, caplog): with pytest.raises(yaml.YAMLError) as e: site_config.load() assert 'issue processing' in caplog.text + + +def test_generate_new_v3_keys(): + public, private = securedrop_admin.generate_new_v3_keys() + + for key in [public, private]: + # base32 padding characters should be removed + assert '=' not in key + assert len(key) == 52 + + +def test_find_or_generate_new_torv3_keys_first_run(tmpdir, capsys): + args = argparse.Namespace(ansible_path=str(tmpdir)) + + return_code = securedrop_admin.find_or_generate_new_torv3_keys(args) + + captured = capsys.readouterr() + assert 'Tor v3 onion service keys generated' in captured.out + assert return_code == 0 + + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + + with open(secret_key_path) as f: + v3_onion_service_keys = json.load(f) + + expected_keys = ['app_journalist_public_key', + 'app_journalist_private_key', + 'app_ssh_public_key', + 'app_ssh_private_key', + 'mon_ssh_public_key', + 'mon_ssh_private_key'] + for key in expected_keys: + assert key in v3_onion_service_keys.keys() + + +def test_find_or_generate_new_torv3_keys_subsequent_run(tmpdir, capsys): + args = argparse.Namespace(ansible_path=str(tmpdir)) + + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + old_keys = {'foo': 'bar'} + with open(secret_key_path, 'w') as f: + json.dump(old_keys, f) + + return_code = securedrop_admin.find_or_generate_new_torv3_keys(args) + + captured = capsys.readouterr() + assert 'Tor v3 onion service keys already exist' in captured.out + assert return_code == 0 + + with open(secret_key_path) as f: + v3_onion_service_keys = json.load(f) + + assert v3_onion_service_keys == old_keys diff --git a/molecule/testinfra/staging/app/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py --- a/molecule/testinfra/staging/app/test_tor_hidden_services.py +++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py @@ -29,6 +29,7 @@ def test_tor_service_hostnames(host, tor_service): # Declare regex only for THS; we'll build regex for ATHS only if # necessary, since we won't have the required values otherwise. ths_hostname_regex = r"[a-z0-9]{16}\.onion" + ths_hostname_regex_v3 = r"[a-z0-9]{56}\.onion" with host.sudo(): f = host.file("/var/lib/tor/services/{}/hostname".format( @@ -41,15 +42,24 @@ def test_tor_service_hostnames(host, tor_service): # All hostnames should contain at *least* the hostname. assert re.search(ths_hostname_regex, f.content_string) - if tor_service['authenticated']: + if tor_service['authenticated'] and tor_service['version'] == 2: # HidServAuth regex is approximately [a-zA-Z0-9/+], but validating # the entire entry is sane, and we don't need to nitpick the # charset. aths_hostname_regex = ths_hostname_regex + " .{22} # client: " + \ tor_service['client'] assert re.search("^{}$".format(aths_hostname_regex), f.content_string) - else: + elif tor_service['authenticated'] and tor_service['version'] == 3: + # For authenticated version 3 onion services, the authorized_client + # directory will exist and contain a file called client.auth. 
+ client_auth = host.file( + "/var/lib/tor/services/{}/authorized_clients/client.auth".format( + tor_service['name'])) + assert client_auth.is_file + elif tor_service['version'] == 2: assert re.search("^{}$".format(ths_hostname_regex), f.content_string) + else: + assert re.search("^{}$".format(ths_hostname_regex_v3), f.content_string) @pytest.mark.parametrize('tor_service', sdvars.tor_services) @@ -61,7 +71,7 @@ def test_tor_services_config(host, tor_service): * HiddenServiceDir * HiddenServicePort - Only authenticated Onion Services must also include: + Only v2 authenticated Onion Services must also include: * HiddenServiceAuthorizeClient @@ -81,7 +91,10 @@ def test_tor_services_config(host, tor_service): # Ensure that service is hardcoded to v2, for compatibility # with newer versions of Tor, which default to v3. - version_string = "HiddenServiceVersion 2" + if tor_service['version'] == 2: + version_string = "HiddenServiceVersion 2" + else: + version_string = "" port_regex = "HiddenServicePort {} 127.0.0.1:{}".format( remote_port, local_port) @@ -89,9 +102,12 @@ def test_tor_services_config(host, tor_service): assert f.contains("^{}$".format(dir_regex)) assert f.contains("^{}$".format(port_regex)) - service_regex = "\n".join([dir_regex, version_string, port_regex]) + if version_string: + service_regex = "\n".join([dir_regex, version_string, port_regex]) + else: + service_regex = "\n".join([dir_regex, port_regex]) - if tor_service['authenticated']: + if tor_service['authenticated'] and tor_service['version'] == 2: auth_regex = "HiddenServiceAuthorizeClient stealth {}".format( tor_service['client']) assert f.contains("^{}$".format(auth_regex))
`make update-admin-pip-requirements` target fails ## Description Trying to update the requirements*.txt files for the onion v3 branch. ## Steps to Reproduce ``` make update-admin-pip-requirements ``` ## Expected Behavior The command should not error out and update the dependencies as required. ## Actual Behavior ``` $ make update-admin-pip-requirements ███ Updating admin pip requirements... Run with DOCKER_BUILD_VERBOSE=true for more information Docker image build in progress ..................................................................................... done ! Traceback (most recent call last): File "/opt/.venv/bin/pip-compile", line 10, in <module> sys.exit(cli()) File "/opt/.venv/local/lib/python2.7/site-packages/click/core.py", line 722, in __call__ return self.main(*args, **kwargs) File "/opt/.venv/local/lib/python2.7/site-packages/click/core.py", line 697, in main rv = self.invoke(ctx) File "/opt/.venv/local/lib/python2.7/site-packages/click/core.py", line 895, in invoke return ctx.invoke(self.callback, **ctx.params) File "/opt/.venv/local/lib/python2.7/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) File "/opt/.venv/local/lib/python2.7/site-packages/piptools/scripts/compile.py", line 129, in cli repository = PyPIRepository(pip_options, session, build_isolation) File "/opt/.venv/local/lib/python2.7/site-packages/piptools/repositories/pypi.py", line 73, in __init__ self.finder = PackageFinder(**finder_kwargs) TypeError: __init__() got an unexpected keyword argument 'index_urls' make[1]: *** [Makefile:10: update-pip-requirements] Error 1 make: *** [Makefile:32: update-admin-pip-requirements] Error 2 ```
2019-07-30T20:01:25Z
[]
[]
freedomofpress/securedrop
4,672
freedomofpress__securedrop-4672
[ "1189" ]
c2d222c6471219a4bcb80c06b1de4b087982ebbb
diff --git a/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py @@ -0,0 +1,107 @@ +"""delete orphaned submissions and replies + +Ref: https://github.com/freedomofpress/securedrop/issues/1189 + +Revision ID: 3da3fcab826a +Revises: 60f41bb14d98 +Create Date: 2018-11-25 19:40:25.873292 + +""" +import os +from alembic import op +import sqlalchemy as sa +from rm import secure_delete + +# raise the errors if we're not in production +raise_errors = os.environ.get("SECUREDROP_ENV", "prod") != "prod" + +try: + from journalist_app import create_app + from sdconfig import config + from store import NoFileFoundException, TooManyFilesException + from worker import create_queue +except ImportError: + # This is a fresh install, and config.py has not been created yet. + if raise_errors: + raise + +# revision identifiers, used by Alembic. +revision = '3da3fcab826a' +down_revision = '60f41bb14d98' +branch_labels = None +depends_on = None + + +def raw_sql_grab_orphaned_objects(table_name): + """Objects that have a source ID that doesn't exist in the + sources table OR a NULL source ID should be deleted.""" + return ('SELECT id, filename, source_id FROM {table} ' # nosec + 'WHERE source_id NOT IN (SELECT id FROM sources) ' + 'UNION SELECT id, filename, source_id FROM {table} ' # nosec + 'WHERE source_id IS NULL').format(table=table_name) + + +def upgrade(): + conn = op.get_bind() + submissions = conn.execute( + sa.text(raw_sql_grab_orphaned_objects('submissions')) + ).fetchall() + + replies = conn.execute( + sa.text(raw_sql_grab_orphaned_objects('replies')) + ).fetchall() + + try: + app = create_app(config) + with app.app_context(): + for submission in submissions: + try: + conn.execute( + sa.text(""" + DELETE FROM submissions + WHERE id=:id + """).bindparams(id=submission.id) + ) + + file_path = app.storage.path_without_filesystem_id(submission.filename) + create_queue().enqueue(secure_delete, file_path) + except NoFileFoundException: + # The file must have been deleted by the admin, remove the row + conn.execute( + sa.text(""" + DELETE FROM submissions + WHERE id=:id + """).bindparams(id=submission.id) + ) + except TooManyFilesException: + pass + + for reply in replies: + try: + conn.execute( + sa.text(""" + DELETE FROM replies + WHERE id=:id + """).bindparams(id=reply.id) + ) + + file_path = app.storage.path_without_filesystem_id(reply.filename) + create_queue().enqueue(secure_delete, file_path) + except NoFileFoundException: + # The file must have been deleted by the admin, remove the row + conn.execute( + sa.text(""" + DELETE FROM replies + WHERE id=:id + """).bindparams(id=reply.id) + ) + except TooManyFilesException: + pass + except: # noqa + if raise_errors: + raise + + +def downgrade(): + # This is a destructive alembic migration, it cannot be downgraded + pass diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -43,6 +43,23 @@ class PathException(Exception): pass +class TooManyFilesException(Exception): + """An exception raised by path_without_filesystem_id when too many + files has been found for a given submission or reply. + This could be due to a very unlikely collision between + journalist_designations. 
+ """ + pass + + +class NoFileFoundException(Exception): + """An exception raised by path_without_filesystem_id when a file could + not be found for a given submission or reply. + This is likely due to an admin manually deleting files from the server. + """ + pass + + class NotEncrypted(Exception): """An exception raised if a file expected to be encrypted client-side is actually plaintext. @@ -107,6 +124,29 @@ def path(self, *s): self.verify(absolute) return absolute + def path_without_filesystem_id(self, filename): + # type: (str) -> str + """Get the normalized, absolute file path, within + `self.__storage_path` for a filename when the filesystem_id + is not known. + """ + + joined_paths = [] + for rootdir, _, files in os.walk(os.path.abspath(self.__storage_path)): + for file_ in files: + if file_ in filename: + joined_paths.append(os.path.join(rootdir, file_)) + + if len(joined_paths) > 1: + raise TooManyFilesException('Found duplicate files!') + elif len(joined_paths) == 0: + raise NoFileFoundException('File not found: {}'.format(filename)) + else: + absolute = joined_paths[0] + + self.verify(absolute) + return absolute + def get_bulk_archive(self, selected_submissions, zip_directory=''): # type: (List, str) -> _TemporaryFileWrapper """Generate a zip file from the selected submissions"""
diff --git a/securedrop/tests/migrations/helpers.py b/securedrop/tests/migrations/helpers.py --- a/securedrop/tests/migrations/helpers.py +++ b/securedrop/tests/migrations/helpers.py @@ -36,6 +36,10 @@ def random_chars(len, chars=string.printable): return ''.join([random.choice(chars) for _ in range(len)]) +def random_ascii_chars(len, chars=string.ascii_lowercase): + return ''.join([random.choice(chars) for _ in range(len)]) + + def random_datetime(nullable): if nullable and random_bool(): return None diff --git a/securedrop/tests/migrations/migration_3da3fcab826a.py b/securedrop/tests/migrations/migration_3da3fcab826a.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_3da3fcab826a.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +import random +import os +from uuid import uuid4 + +from sqlalchemy import text + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_ascii_chars, random_datetime, bool_or_none + + +TEST_DATA_DIR = '/tmp/securedrop/store' + + +def create_file_in_dummy_source_dir(filename): + filesystem_id = 'dummy' + basedir = os.path.join(TEST_DATA_DIR, filesystem_id) + + if not os.path.exists(basedir): + os.makedirs(basedir) + + path_to_file = os.path.join(basedir, filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) + + +class UpgradeTester: + + """This migration verifies that any orphaned submission or reply data from + deleted sources is also deleted. + """ + + def __init__(self, config): + self.config = config + self.app = create_app(config) + self.journalist_id = None + + def load_data(self): + with self.app.app_context(): + self.create_journalist() + self.add_source() + self.valid_source_id = 1 + deleted_source_id = 2 + + # Add submissions and replies with and without a valid source + self.add_submission(self.valid_source_id) + self.add_submission(deleted_source_id) + self.add_submission(deleted_source_id, with_file=False) + self.add_submission(None) # NULL source + + self.add_reply(self.journalist_id, self.valid_source_id) + self.add_reply(self.journalist_id, deleted_source_id) + self.add_reply(self.journalist_id, deleted_source_id, with_file=False) + self.add_reply(self.journalist_id, None) # NULL source + + db.session.commit() + + def create_journalist(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid4()), + 'username': random_chars(50), + 'session_nonce': 0 + } + sql = '''INSERT INTO journalists (uuid, username, session_nonce) + VALUES (:uuid, :username, :session_nonce) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + def add_reply(self, journalist_id, source_id, with_file=True): + filename = '1-' + random_ascii_chars(5) + '-' + random_ascii_chars(5) + '-reply.gpg' + params = { + 'uuid': str(uuid4()), + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': filename, + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, uuid, source_id, filename, + size, deleted_by_source) + VALUES (:journalist_id, :uuid, :source_id, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + if with_file: + create_file_in_dummy_source_dir(filename) + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'uuid': str(uuid4()), + 'filesystem_id': filesystem_id, + 'journalist_designation': 
random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (uuid, filesystem_id, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:uuid, :filesystem_id, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + def add_submission(self, source_id, with_file=True): + filename = '1-' + random_ascii_chars(5) + '-' + random_ascii_chars(5) + '-doc.gz.gpg' + params = { + 'uuid': str(uuid4()), + 'source_id': source_id, + 'filename': filename, + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (uuid, source_id, filename, size, + downloaded) + VALUES (:uuid, :source_id, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + if with_file: + create_file_in_dummy_source_dir(filename) + + def check_upgrade(self): + with self.app.app_context(): + submissions = db.engine.execute( + text('SELECT * FROM submissions')).fetchall() + + # Submissions without a source should be deleted + assert len(submissions) == 1 + for submission in submissions: + assert submission.source_id == self.valid_source_id + + replies = db.engine.execute( + text('SELECT * FROM replies')).fetchall() + + # Replies without a source should be deleted + assert len(replies) == 1 + for reply in replies: + assert reply.source_id == self.valid_source_id + + +class DowngradeTester: + # This is a destructive alembic migration, it cannot be downgraded + + def __init__(self, config): + self.config = config + + def load_data(self): + pass + + def check_downgrade(self): + pass diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py --- a/securedrop/tests/test_store.py +++ b/securedrop/tests/test_store.py @@ -51,6 +51,49 @@ def test_path_returns_filename_of_items_within_folder(journalist_app, config): assert generated_absolute_path == expected_absolute_path +def test_path_without_filesystem_id(journalist_app, config): + filesystem_id = 'example' + item_filename = '1-quintuple_cant-msg.gpg' + + basedir = os.path.join(config.STORE_DIR, filesystem_id) + os.makedirs(basedir) + + path_to_file = os.path.join(basedir, item_filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) + + generated_absolute_path = \ + journalist_app.storage.path_without_filesystem_id(item_filename) + + expected_absolute_path = os.path.join(config.STORE_DIR, + filesystem_id, item_filename) + assert generated_absolute_path == expected_absolute_path + + +def test_path_without_filesystem_id_duplicate_files(journalist_app, config): + filesystem_id = 'example' + filesystem_id_duplicate = 'example2' + item_filename = '1-quintuple_cant-msg.gpg' + + basedir = os.path.join(config.STORE_DIR, filesystem_id) + duplicate_basedir = os.path.join(config.STORE_DIR, filesystem_id_duplicate) + + for directory in [basedir, duplicate_basedir]: + os.makedirs(directory) + path_to_file = os.path.join(directory, item_filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) + + with pytest.raises(store.TooManyFilesException): + journalist_app.storage.path_without_filesystem_id(item_filename) + + +def test_path_without_filesystem_id_no_file(journalist_app, config): + item_filename = 'not there' + with pytest.raises(store.NoFileFoundException): + 
journalist_app.storage.path_without_filesystem_id(item_filename) + + def test_verify_path_not_absolute(journalist_app, config): with pytest.raises(store.PathException): journalist_app.storage.verify(
Handle Submissions with no Sources in currently running instances

We need to handle submission database entries in currently running instances that have no associated Source. This is a follow-up to https://github.com/freedomofpress/securedrop/issues/1188.
@garrettr Can you add this as a milestone? Thanks! X-link: https://github.com/freedomofpress/securedrop/issues/1419. Note: the correct place to address this ticket now is an Alembic revision (with no `downgrade` implemented, as this is a destructive action).
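For illustration, here is a minimal sketch of the kind of destructive Alembic revision the hint describes, assuming the `submissions` and `sources` schema shown in the patch above. The revision identifiers are placeholders, and unlike the real migration it only deletes database rows, not the files on disk:

```python
"""Illustrative only: delete orphaned submissions, with no downgrade."""
from alembic import op
import sqlalchemy as sa

# Placeholder identifiers; real values are generated by alembic.
revision = '0000aaaa0000'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    conn = op.get_bind()
    # Remove submissions whose source_id is NULL or refers to a
    # source row that no longer exists.
    conn.execute(sa.text(
        'DELETE FROM submissions '
        'WHERE source_id IS NULL '
        'OR source_id NOT IN (SELECT id FROM sources)'
    ))


def downgrade():
    # Destructive migration: the deleted rows cannot be restored.
    pass
```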
2019-08-15T02:17:17Z
[]
[]
freedomofpress/securedrop
4,675
freedomofpress__securedrop-4675
[ "4629" ]
b91c437a4e5ed5120813af5021322bf8ef4884d9
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py --- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py +++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py @@ -7,6 +7,8 @@ import sys import subprocess +from shutil import copyfile + # check for root if os.geteuid() != 0: @@ -26,6 +28,16 @@ path_gui_updater = os.path.join(path_securedrop_root, 'journalist_gui/SecureDropUpdater') +paths_v3_authfiles = { + "app-journalist": os.path.join(path_securedrop_root, + 'install_files/ansible-base/app-journalist.auth_private'), + "app-ssh": os.path.join(path_securedrop_root, + 'install_files/ansible-base/app-ssh.auth_private'), + "mon-ssh": os.path.join(path_securedrop_root, + 'install_files/ansible-base/mon-ssh.auth_private') +} +path_onion_auth_dir = '/var/lib/tor/onion_auth' + # load torrc_additions if os.path.isfile(path_torrc_additions): with io.open(path_torrc_additions) as f: @@ -52,11 +64,35 @@ with io.open(path_torrc, 'w') as f: f.write(torrc + torrc_additions) -# reload tor +# check for v3 aths files +v3_authfiles_present = False +for f in paths_v3_authfiles.values(): + if os.path.isfile(f): + v3_authfiles_present = True + +# if there are v3 authfiles, make dir and copy them into place +debian_tor_uid = pwd.getpwnam("debian-tor").pw_uid +debian_tor_gid = grp.getgrnam("debian-tor").gr_gid + +if not os.path.isdir(path_onion_auth_dir): + os.mkdir(path_onion_auth_dir) + +os.chmod(path_onion_auth_dir, 0o700) +os.chown(path_onion_auth_dir, debian_tor_uid, debian_tor_gid) + +for key, f in paths_v3_authfiles.items(): + if os.path.isfile(f): + filename = os.path.basename(f) + new_f = os.path.join(path_onion_auth_dir, filename) + copyfile(f, new_f) + os.chmod(new_f, 0o400) + os.chown(new_f, debian_tor_uid, debian_tor_gid) + +# restart tor try: - subprocess.check_call(['systemctl', 'reload', '[email protected]']) + subprocess.check_call(['systemctl', 'restart', '[email protected]']) except subprocess.CalledProcessError: - sys.exit('Error reloading Tor') + sys.exit('Error restarting Tor') # Turn off "automatic-decompression" in Nautilus to ensure the original # submission filename is restored (see
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -665,7 +665,7 @@ def test_only_v3_onion_services(self, tmpdir): ansible_path='.', app_path=dirname(__file__)) site_config = securedrop_admin.SiteConfig(args) - with open("app-source-ths", "w") as fobj: + with open("app-sourcev3-ths", "w") as fobj: fobj.write("a" * 56 + ".onion\n") site_config.update_onion_version_config() site_config.save() @@ -676,7 +676,7 @@ def test_only_v3_onion_services(self, tmpdir): v2_onion_services: false v3_onion_services: true """) - os.remove("app-source-ths") + os.remove("app-sourcev3-ths") assert expected == data def test_validate_gpg_key(self, caplog):
[v3 onion migration] Modify securedrop-admin tailsconfig to use v3 if available

## Description

This ticket is to modify `securedrop-admin tailsconfig` (used for configuring desktop shortcuts and `torrc` additions in the SecureDrop Tails workstations) to:

* Use v3 addresses for desktop shortcuts if v3 files are present; otherwise continue to use v2
* Add v3 client auth details to `torrc` if they are present (and NOT add v2 HidServAuth, so that it is easier in the future to rip out v2 without making a round of updates to journalist and admin drives); see the sketch below

Subticket of #2951
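A condensed sketch of the file-presence logic this asks for, following the paths used in the `securedrop_init.py` patch above; the function name and the `ansible_base` parameter are illustrative, and the shipped code additionally chowns the files to `debian-tor`:

```python
import os
import shutil

V3_AUTHFILES = ('app-journalist.auth_private',
                'app-ssh.auth_private',
                'mon-ssh.auth_private')
ONION_AUTH_DIR = '/var/lib/tor/onion_auth'


def install_v3_auth_files(ansible_base):
    """Copy v3 client-auth files into tor's onion_auth directory.

    Returns True if any v3 credentials were found, so the caller can
    prefer v3 addresses for shortcuts and skip v2 HidServAuth lines.
    """
    present = [n for n in V3_AUTHFILES
               if os.path.isfile(os.path.join(ansible_base, n))]
    if not present:
        return False  # no v3 files: keep the v2 behaviour
    if not os.path.isdir(ONION_AUTH_DIR):
        os.mkdir(ONION_AUTH_DIR)
    os.chmod(ONION_AUTH_DIR, 0o700)
    for name in present:
        dst = os.path.join(ONION_AUTH_DIR, name)
        shutil.copyfile(os.path.join(ansible_base, name), dst)
        os.chmod(dst, 0o400)
    return True
```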
2019-08-16T16:41:18Z
[]
[]
freedomofpress/securedrop
4,710
freedomofpress__securedrop-4710
[ "4708" ]
d3d3ab79ac519e1545d78109c60cd605b3a29fa8
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -160,6 +160,23 @@ def validate(self, document): return True raise ValidationError(message="Must be either yes or no") + class ValidateYesNoForV3(Validator): + + def __init__(self, *args, **kwargs): + Validator.__init__(*args, **kwargs) + self.caller = args[0] + + def validate(self, document): + text = document.text.lower() + # Raise error if admin tries to disable v3 when v2 + # is already disabled. + if text == 'no' and \ + not self.caller._config_in_progress.get("v2_onion_services"): # noqa: E501 + raise ValidationError(message="Since you disabled v2 onion services, you must enable v3 onion services.") # noqa: E501 + if text == 'yes' or text == 'no': + return True + raise ValidationError(message="Must be either yes or no") + class ValidateFingerprint(Validator): def validate(self, document): text = document.text.replace(' ', '') @@ -261,6 +278,9 @@ def validate(self, document): def __init__(self, args): self.args = args self.config = {} + # Hold runtime configuration before save, to support + # referencing other responses during validation + self._config_in_progress = {} translations = SiteConfig.Locales( self.args.app_path).get_translations() translations = " ".join(translations) @@ -403,6 +423,16 @@ def __init__(self, args): SiteConfig.ValidateLocales(self.args.app_path), string.split, lambda config: True], + ['v2_onion_services', self.check_for_v2_onion(), bool, + u'Do you want to enable v2 onion services (recommended only for SecureDrop instances installed before 1.0.0)?', # noqa: E501 + SiteConfig.ValidateYesNo(), + lambda x: x.lower() == 'yes', + lambda config: True], + ['v3_onion_services', self.check_for_v3_onion, bool, + u'Do you want to enable v3 onion services (recommended)?', + SiteConfig.ValidateYesNoForV3(self), + lambda x: x.lower() == 'yes', + lambda config: True], ] def load_and_update_config(self): @@ -413,18 +443,15 @@ def load_and_update_config(self): def update_config(self): self.config.update(self.user_prompt_config()) - self.update_onion_version_config() self.save() self.validate_gpg_keys() self.validate_journalist_alert_email() return True - def update_onion_version_config(self): + def check_for_v2_onion(self): """ - This method updates onion service related configurations. + Check if v2 onion services are already enabled or not. """ - v2 = False - v3 = True source_ths = os.path.join(self.args.ansible_path, "app-source-ths") if os.path.exists(source_ths): # Means old installation data = "" @@ -433,30 +460,46 @@ def update_onion_version_config(self): data = data.strip() if len(data) < 56: # Old v2 onion address - v2 = True + return True + return False - # Now update the configuration - config = {"v2_onion_services": v2, - "v3_onion_services": v3} - self.config.update(config) + def check_for_v3_onion(self): + """ + Check if v3 onion services should be enabled by default or not. 
+ """ + v2_value = self._config_in_progress.get("v2_onion_services", False) + # We need to see the value in the configuration file + # for v3_onion_services + v3_value = self.config.get("v3_onion_services", True) + return v3_value or not v2_value def user_prompt_config(self): - config = {} + self._config_in_progress = {} for desc in self.desc: (var, default, type, prompt, validator, transform, condition) = desc - if not condition(config): - config[var] = '' + if not condition(self._config_in_progress): + self._config_in_progress[var] = '' continue - config[var] = self.user_prompt_config_one(desc, - self.config.get(var)) - return config + self._config_in_progress[var] = self.user_prompt_config_one(desc, + self.config.get(var)) # noqa: E501 + return self._config_in_progress def user_prompt_config_one(self, desc, from_config): (var, default, type, prompt, validator, transform, condition) = desc - if from_config is not None: + if from_config is not None and var != "v3_onion_services": + # v3_onion_services must be true if v2 is disabled by the admin + # otherwise, we may end up in a situation where both v2 and v3 + # are disabled by the admin (by mistake). default = from_config prompt += ': ' + + # The following is for the dynamic check of the user input + # for the previous question, as we are calling the default value + # function dynamically, we can get the right value based on the + # previous user input. + if callable(default): + default = default() return self.validated_input(prompt, default, validator, transform) def validated_input(self, prompt, default, validator, transform):
diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -48,6 +48,70 @@ v3_onion_services: true ''' +WHEN_BOTH_TRUE = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: '' +journalist_alert_gpg_public_key: '' +journalist_gpg_fpr: '' +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: true +v3_onion_services: true +''' + +WHEN_ONLY_V2 = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: '' +journalist_alert_gpg_public_key: '' +journalist_gpg_fpr: '' +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: true +v3_onion_services: false +''' + JOURNALIST_ALERT_OUTPUT = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 @@ -251,6 +315,21 @@ def verify_locales_prompt(child): child.expect('Space separated list of additional locales to support') # noqa: E501 +def verify_v2_onion_for_first_time(child): + child.expect(r' installed before 1.0.0\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer) == ' no' # Expected default + + +def verify_v3_onion_for_first_time(child): + child.expect(r'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default + + +def verify_v3_onion_when_v2_is_enabled(child): + child.expect(r'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default + + def test_sdconfig_on_first_run(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), 'securedrop_admin/__init__.py') @@ -298,6 +377,10 @@ def test_sdconfig_on_first_run(): child.sendline('') verify_locales_prompt(child) child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'no') + verify_v3_onion_for_first_time(child) + child.sendline('\b' * 4 + 'yes') child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur child.close() @@ -309,6 +392,130 @@ def test_sdconfig_on_first_run(): assert data == 
OUTPUT1 +def test_sdconfig_both_v2_v3_true(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + verify_journalist_gpg_key_prompt(child) + child.sendline('') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'yes') + verify_v3_onion_when_v2_is_enabled(child) + child.sendline('\b' * 3 + 'yes') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert data == WHEN_BOTH_TRUE + + +def test_sdconfig_only_v2_true(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + verify_journalist_gpg_key_prompt(child) + child.sendline('') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + 
verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'yes') + verify_v3_onion_when_v2_is_enabled(child) + child.sendline('\b' * 3 + 'no') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert data == WHEN_ONLY_V2 + + def test_sdconfig_enable_journalist_alerts(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), 'securedrop_admin/__init__.py') @@ -361,6 +568,10 @@ def test_sdconfig_enable_journalist_alerts(): child.sendline('') verify_locales_prompt(child) child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'no') + verify_v3_onion_for_first_time(child) + child.sendline('\b' * 4 + 'yes') child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur child.close() @@ -431,6 +642,10 @@ def test_sdconfig_enable_https_on_source_interface(): child.sendline('') verify_locales_prompt(child) child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'no') + verify_v3_onion_for_first_time(child) + child.sendline('\b' * 4 + 'yes') child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur child.close() diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -619,66 +619,6 @@ def test_save(self, tmpdir): """) assert expected == io.open(site_config_path).read() - def test_old_v2_onion_services(self, tmpdir): - "Checks for exitsing v2 source address" - site_config_path = join(str(tmpdir), 'site_config') - args = argparse.Namespace(site_config=site_config_path, - ansible_path='.', - app_path=dirname(__file__)) - site_config = securedrop_admin.SiteConfig(args) - with open("app-source-ths", "w") as fobj: - fobj.write("aaaaaaaaaaaaaaaa.onion\n") - site_config.update_onion_version_config() - site_config.save() - data = "" - with open(site_config_path) as fobj: - data = fobj.read() - expected = textwrap.dedent("""\ - v2_onion_services: true - v3_onion_services: true - """) - os.remove("app-source-ths") - assert expected == data - - def test_no_v2_onion_services(self, tmpdir): - "Checks for new installation for only v3" - site_config_path = join(str(tmpdir), 'site_config') - args = argparse.Namespace(site_config=site_config_path, - ansible_path='.', - app_path=dirname(__file__)) - site_config = securedrop_admin.SiteConfig(args) - site_config.update_onion_version_config() - site_config.save() - data = "" - with open(site_config_path) as fobj: - data = fobj.read() - expected = textwrap.dedent("""\ - v2_onion_services: false - v3_onion_services: true - """) - assert expected == data - - def test_only_v3_onion_services(self, tmpdir): - "Checks for new installation for only v3 ths file" - site_config_path = join(str(tmpdir), 'site_config') - args = argparse.Namespace(site_config=site_config_path, - ansible_path='.', - app_path=dirname(__file__)) - site_config = securedrop_admin.SiteConfig(args) - with open("app-sourcev3-ths", "w") as fobj: - fobj.write("a" * 56 + ".onion\n") - site_config.update_onion_version_config() - site_config.save() - data = "" - 
with open(site_config_path) as fobj: - data = fobj.read() - expected = textwrap.dedent("""\ - v2_onion_services: false - v3_onion_services: true - """) - os.remove("app-sourcev3-ths") - assert expected == data - def test_validate_gpg_key(self, caplog): args = argparse.Namespace(site_config='INVALID', ansible_path='tests/files', @@ -817,6 +757,8 @@ def get_desc(self, site_config, var): def verify_desc_consistency_optional(self, site_config, desc): (var, default, etype, prompt, validator, transform, condition) = desc # verify the default passes validation + if callable(default): + default = default() assert site_config.user_prompt_config_one(desc, None) == default assert type(default) == etype @@ -825,6 +767,12 @@ def verify_desc_consistency(self, site_config, desc): (var, default, etype, prompt, validator, transform, condition) = desc with pytest.raises(ValidationError): site_config.user_prompt_config_one(desc, '') + # If we are testing v3_onion_services, that will create a default + # value of 'yes', means it will not raise the ValidationError. We + # are generating it below for test to behave properly with all + # other test cases. + if var == "v3_onion_services": + raise ValidationError() def verify_prompt_boolean( self, site_config, desc): @@ -835,6 +783,34 @@ def verify_prompt_boolean( assert site_config.user_prompt_config_one(desc, 'YES') is True assert site_config.user_prompt_config_one(desc, 'NO') is False + def verify_prompt_boolean_for_v3( + self, site_config, desc): + """As v3_onion_services input depends on input of + v2_onion_service, the answers will change. + """ + self.verify_desc_consistency(site_config, desc) + (var, default, etype, prompt, validator, transform, condition) = desc + assert site_config.user_prompt_config_one(desc, True) is True + # Because if no v2_onion_service, v3 will become True + assert site_config.user_prompt_config_one(desc, False) is True + assert site_config.user_prompt_config_one(desc, 'YES') is True + # Because if no v2_onion_service, v3 will become True + assert site_config.user_prompt_config_one(desc, 'NO') is True + + # Now we will set v2_onion_services as True so that we + # can set v3_onion_service as False. This is the case + # when an admin particularly marked v3 as False. + site_config._config_in_progress = {"v2_onion_services": True} + site_config.config = {"v3_onion_services": False} + + # The next two tests should use the default from the above line, + # means it will return False value. + assert site_config.user_prompt_config_one(desc, True) is False + assert site_config.user_prompt_config_one(desc, 'YES') is False + + assert site_config.user_prompt_config_one(desc, False) is False + assert site_config.user_prompt_config_one(desc, 'NO') is False + def test_desc_conditional(self): """Ensure that conditional prompts behave correctly. @@ -887,6 +863,8 @@ def auto_prompt(prompt, default, **kwargs): verify_prompt_enable_ssh_over_tor = verify_prompt_boolean verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency + verify_prompt_v2_onion_services = verify_prompt_boolean + verify_prompt_v3_onion_services = verify_prompt_boolean_for_v3 def verify_prompt_not_empty(self, site_config, desc): with pytest.raises(ValidationError):
./securedrop-admin sdconfig does not ask if we should enable v3 or not

## Description

Right now, if we run `./securedrop-admin sdconfig`, it enables a v3 onion address along with v2 (on an already existing installation). It should ask the admin whether to enable v3, like any other configuration option.

## Steps to Reproduce

`./securedrop-admin sdconfig`

## Expected Behavior

It should ask whether we want to enable a v3 onion address or not.

## Actual Behavior

It does not ask any question about the version of the onion addresses.

One thing to remember: we should not allow someone to disable both v2 and v3 services.
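The constraint in the last sentence (never both disabled) is exactly what the validator in the patch above enforces; a condensed sketch using the same prompt_toolkit API:

```python
from prompt_toolkit.validation import ValidationError, Validator


class ValidateYesNoForV3(Validator):
    """Reject disabling v3 when v2 is already disabled."""

    def __init__(self, config_in_progress):
        # Answers gathered so far during `sdconfig`.
        self.config_in_progress = config_in_progress

    def validate(self, document):
        text = document.text.lower()
        if text == 'no' and \
                not self.config_in_progress.get('v2_onion_services'):
            raise ValidationError(
                message='Since you disabled v2 onion services, '
                        'you must enable v3 onion services.')
        if text not in ('yes', 'no'):
            raise ValidationError(message='Must be either yes or no')
```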
2019-08-27T19:03:24Z
[]
[]
freedomofpress/securedrop
4,713
freedomofpress__securedrop-4713
[ "4712" ]
00d6fbf7500f55b9a1e1fa6c52e1602169a95b61
diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py --- a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py @@ -15,7 +15,7 @@ from models import Submission, Reply from sdconfig import config from store import queued_add_checksum_for_file - from worker import rq_worker_queue + from worker import create_queue except: # noqa if raise_errors: raise @@ -59,7 +59,7 @@ def upgrade(): ) for (sub_id, filesystem_id, filename) in conn.execute(query): full_path = app.storage.path(filesystem_id, filename) - rq_worker_queue.enqueue( + create_queue().enqueue( queued_add_checksum_for_file, Submission, int(sub_id), @@ -76,7 +76,7 @@ def upgrade(): ) for (rep_id, filesystem_id, filename) in conn.execute(query): full_path = app.storage.path(filesystem_id, filename) - rq_worker_queue.enqueue( + create_queue().enqueue( queued_add_checksum_for_file, Reply, int(rep_id), diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -7,7 +7,6 @@ import io import six import scrypt -import subprocess from random import SystemRandom from base64 import b32encode @@ -15,6 +14,8 @@ from flask import current_app from pretty_bad_protocol._util import _is_stream, _make_binary_stream +import rm + import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking if typing.TYPE_CHECKING: @@ -24,6 +25,7 @@ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401stream from typing import Dict, List, Text # noqa: F401 + # to fix gpg error #78 on production os.environ['USERNAME'] = 'www-data' @@ -122,11 +124,9 @@ def __init__(self, def do_runtime_tests(self): if self.scrypt_id_pepper == self.scrypt_gpg_pepper: raise AssertionError('scrypt_id_pepper == scrypt_gpg_pepper') - # crash if we don't have srm: - try: - subprocess.check_call(['srm'], stdout=subprocess.PIPE) - except subprocess.CalledProcessError: - pass + # crash if we don't have a way to securely remove files + if not rm.check_secure_delete_capability(): + raise AssertionError("Secure file deletion is not possible.") def get_wordlist(self, locale): # type: (Text) -> List[str] diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -22,7 +22,6 @@ cleanup_expired_revoked_tokens) from models import Journalist from store import Storage -from worker import rq_worker_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -82,9 +81,6 @@ def create_app(config): gpg_key_dir=config.GPG_KEY_DIR, ) - app.config['RQ_WORKER_NAME'] = config.RQ_WORKER_NAME - rq_worker_queue.init_app(app) - @app.errorhandler(CSRFError) def handle_csrf_error(e): # type: (CSRFError) -> Response diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -13,9 +13,9 @@ from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, WrongPasswordException, FirstOrLastNameError, LoginThrottledException, BadTokenException, SourceStar, PasswordError, Submission, RevokedToken) -from rm import srm +from rm import secure_delete from store import add_checksum_for_file -from worker import 
rq_worker_queue +from worker import create_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -173,7 +173,7 @@ def download(zip_basename, submissions): def delete_file(filesystem_id, filename, file_object): file_path = current_app.storage.path(filesystem_id, filename) - rq_worker_queue.enqueue(srm, file_path) + create_queue().enqueue(secure_delete, file_path) db.session.delete(file_object) db.session.commit() @@ -260,7 +260,7 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions - job = rq_worker_queue.enqueue(srm, current_app.storage.path(filesystem_id)) + job = create_queue().enqueue(secure_delete, current_app.storage.path(filesystem_id)) # Delete the source's reply keypair current_app.crypto_util.delete_reply_keypair(filesystem_id) diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -1,12 +1,10 @@ -#!/usr/bin/env python +#!/opt/venvs/securedrop-app-code/bin/python # -*- coding: utf-8 -*- import argparse -import datetime import logging import os import pwd -import qrcode import subprocess import shutil import signal @@ -14,20 +12,33 @@ import time import traceback +sys.path.insert(0, "/var/www/securedrop") # noqa: E402 + +import qrcode from six.moves import input -from contextlib import contextmanager from flask import current_app -from sqlalchemy import create_engine from sqlalchemy.orm.exc import NoResultFound -from sqlalchemy.orm import sessionmaker os.environ['SECUREDROP_ENV'] = 'dev' # noqa -from sdconfig import config -import journalist_app from db import db -from models import Source, Journalist, PasswordError, InvalidUsernameException, FirstOrLastNameError +from models import ( + FirstOrLastNameError, + InvalidUsernameException, + Journalist, + PasswordError, +) +from management import app_context, config from management.run import run +from management.submissions import ( + add_check_db_disconnect_parser, + add_check_fs_disconnect_parser, + add_delete_db_disconnect_parser, + add_delete_fs_disconnect_parser, + add_list_db_disconnect_parser, + add_list_fs_disconnect_parser, + add_were_there_submissions_today, +) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s') log = logging.getLogger(__name__) @@ -39,7 +50,7 @@ def obtain_input(text): return input(text) -def reset(args): +def reset(args, context=None): """Clears the SecureDrop development applications' state, restoring them to the way they were immediately after running `setup_dev.sh`. This command: 1. Erases the development sqlite database file. @@ -77,7 +88,7 @@ def reset(args): # 3. Create the DB from the metadata directly when in 'dev' so # developers can test application changes without first writing # alembic migration. 
- with journalist_app.create_app(config).app_context(): + with context or app_context(): db.create_all() else: # We have to override the hardcoded .ini file because during testing @@ -172,8 +183,8 @@ def _make_password(): continue -def _add_user(is_admin=False): - with app_context(): +def _add_user(is_admin=False, context=None): + with context or app_context(): username = _get_username() first_name = _get_first_name() last_name = _get_last_name() @@ -251,9 +262,9 @@ def _get_delete_confirmation(user): return True -def delete_user(args): +def delete_user(args, context=None): """Deletes a journalist or admin from the application.""" - with app_context(): + with context or app_context(): username = _get_username_to_delete() try: selected_user = Journalist.query.filter_by(username=username).one() @@ -314,27 +325,6 @@ def init_db(args): subprocess.check_call(['alembic', 'upgrade', 'head']) -def were_there_submissions_today(args): - if config.DATABASE_ENGINE == "sqlite": - db_uri = (config.DATABASE_ENGINE + ":///" + - config.DATABASE_FILE) - else: - db_uri = ( - config.DATABASE_ENGINE + '://' + - config.DATABASE_USERNAME + ':' + - config.DATABASE_PASSWORD + '@' + - config.DATABASE_HOST + '/' + - config.DATABASE_NAME - ) - session = sessionmaker(bind=create_engine(db_uri))() - something = session.query(Source).filter( - Source.last_updated > - datetime.datetime.utcnow() - datetime.timedelta(hours=24) - ).count() > 0 - count_file = os.path.join(args.data_root, 'submissions_today.txt') - open(count_file, 'w').write(something and '1' or '0') - - def get_args(): parser = argparse.ArgumentParser(prog=__file__, description='Management ' 'and testing utility for SecureDrop.') @@ -378,23 +368,21 @@ def get_args(): set_clean_tmp_parser(subps, 'clean-tmp') set_clean_tmp_parser(subps, 'clean_tmp') - set_were_there_submissions_today(subps) - init_db_subp = subps.add_parser('init-db', help='initialize the DB') init_db_subp.add_argument('-u', '--user', help='Unix user for the DB', required=True) init_db_subp.set_defaults(func=init_db) - return parser - + add_check_db_disconnect_parser(subps) + add_check_fs_disconnect_parser(subps) + add_delete_db_disconnect_parser(subps) + add_delete_fs_disconnect_parser(subps) + add_list_db_disconnect_parser(subps) + add_list_fs_disconnect_parser(subps) + add_were_there_submissions_today(subps) -def set_were_there_submissions_today(subps): - parser = subps.add_parser( - 'were-there-submissions-today', - help=('Update the file indicating ' - 'whether submissions were received in the past 24h')) - parser.set_defaults(func=were_there_submissions_today) + return parser def set_clean_tmp_parser(subps, name): @@ -422,18 +410,17 @@ def setup_verbosity(args): logging.getLogger(__name__).setLevel(logging.INFO) -@contextmanager -def app_context(): - with journalist_app.create_app(config).app_context(): - yield - - def _run_from_commandline(): # pragma: no cover try: - args = get_args().parse_args() + parser = get_args() + args = parser.parse_args() setup_verbosity(args) - rc = args.func(args) - sys.exit(rc) + try: + rc = args.func(args) + sys.exit(rc) + except AttributeError: + parser.print_help() + parser.exit() except KeyboardInterrupt: sys.exit(signal.SIGINT) diff --git a/securedrop/management/__init__.py b/securedrop/management/__init__.py --- a/securedrop/management/__init__.py +++ b/securedrop/management/__init__.py @@ -0,0 +1,10 @@ +from contextlib import contextmanager + +import journalist_app +from sdconfig import config + + +@contextmanager +def app_context(): + with 
journalist_app.create_app(config).app_context(): + yield diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py new file mode 100644 --- /dev/null +++ b/securedrop/management/submissions.py @@ -0,0 +1,238 @@ +from __future__ import print_function + +import datetime +import os +import sys +import time + +from six.moves import input + +from db import db +from rm import secure_delete +from models import Source, Submission +from management import app_context + + +def find_disconnected_db_submissions(path): + submissions = db.session.query(Submission).order_by(Submission.id, Submission.filename).all() + + files_in_fs = {} + for directory, subdirs, files in os.walk(path): + for f in files: + files_in_fs[f] = os.path.abspath(os.path.join(directory, f)) + + disconnected_submissions = [s for s in submissions if s.filename not in files_in_fs] + + return disconnected_submissions + + +def check_for_disconnected_db_submissions(args): + """ + Check for Submission records whose files are missing. + """ + with app_context(): + disconnected = find_disconnected_db_submissions(args.store_dir) + if disconnected: + print( + "There are submissions in the database with no corresponding files. " + 'Run "manage.py list-disconnected-db-submissions" for details.' + ) + else: + print("No problems were found. All submissions' files are present.") + + +def list_disconnected_db_submissions(args): + """ + List the IDs of Submission records whose files are missing. + """ + with app_context(): + disconnected_submissions = find_disconnected_db_submissions(args.store_dir) + if disconnected_submissions: + print( + 'Run "manage.py delete-disconnected-db-submissions" to delete these records.', + file=sys.stderr + ) + for s in disconnected_submissions: + print(s.id) + + +def delete_disconnected_db_submissions(args): + """ + Delete Submission records whose files are missing. + """ + with app_context(): + disconnected_submissions = find_disconnected_db_submissions(args.store_dir) + ids = [s.id for s in disconnected_submissions] + + remove = args.force + if not args.force: + remove = input("Enter 'y' to delete all submissions missing files: ") == "y" + if remove: + print("Removing submission IDs {}...".format(ids)) + db.session.query(Submission).filter(Submission.id.in_(ids)).delete( + synchronize_session="fetch" + ) + db.session.commit() + else: + print("Not removing disconnected submissions in database.") + + +def find_disconnected_fs_submissions(path): + submissions = Submission.query.order_by(Submission.id, Submission.filename).all() + files_in_db = {s.filename: s for s in submissions} + + files_in_fs = {} + for directory, subdirs, files in os.walk(path): + for f in files: + files_in_fs[f] = os.path.abspath(os.path.join(directory, f)) + + disconnected_files = [] + for f, p in files_in_fs.items(): + if f not in files_in_db: + filesize = os.stat(p).st_size + disconnected_files.append((p, filesize)) + + disconnected_files = [t[0] for t in sorted(disconnected_files, key=lambda t: t[1])] + + return disconnected_files + + +def check_for_disconnected_fs_submissions(args): + """ + Check for files without a corresponding Submission record in the database. + """ + with app_context(): + disconnected = find_disconnected_fs_submissions(args.store_dir) + if disconnected: + print( + "There are files in the submission area with no corresponding records in the " + 'database. Run "manage.py list-disconnected-fs-submissions" for details.' 
+ ) + else: + print("No unexpected files were found in the store.") + + +def list_disconnected_fs_submissions(args): + """ + List files without a corresponding Submission record in the database. + """ + with app_context(): + disconnected_files = find_disconnected_fs_submissions(args.store_dir) + if disconnected_files: + print( + 'Run "manage.py delete-disconnected-fs-submissions" to delete these files.', + file=sys.stderr + ) + for f in disconnected_files: + print(f) + + +def delete_disconnected_fs_submissions(args): + """ + Delete files without a corresponding Submission record in the database. + """ + with app_context(): + disconnected_files = find_disconnected_fs_submissions(args.store_dir) + bytes_deleted = 0 + time_elapsed = 0.0 + rate = 1.0 + filecount = len(disconnected_files) + eta = 1.0 + eta_msg = "" + for i, f in enumerate(disconnected_files, 1): + remove = args.force + if not args.force: + remove = input("Enter 'y' to delete {}: ".format(f)) == "y" + if remove: + filesize = os.stat(f).st_size + if i > 1: + eta = filesize / rate + eta_msg = " (ETA to remove {:d} bytes: {:.0f}s )".format(filesize, eta) + print("Securely removing file {}/{} {}{}...".format(i, filecount, f, eta_msg)) + start = time.time() + secure_delete(f) + file_elapsed = time.time() - start + bytes_deleted += filesize + time_elapsed += file_elapsed + rate = bytes_deleted / time_elapsed + print( + "elapsed: {:.2f}s rate: {:.1f} MB/s overall rate: {:.1f} MB/s".format( + file_elapsed, filesize / 1048576 / file_elapsed, rate / 1048576 + ) + ) + else: + print("Not removing {}.".format(f)) + + +def were_there_submissions_today(args, context=None): + with context or app_context(): + something = ( + db.session.query(Source) + .filter(Source.last_updated > datetime.datetime.utcnow() - datetime.timedelta(hours=24)) + .count() + > 0 + ) + count_file = os.path.join(args.data_root, "submissions_today.txt") + open(count_file, "w").write(something and "1" or "0") + + +def add_check_db_disconnect_parser(subps): + check_db_disconnect_subp = subps.add_parser( + "check-disconnected-db-submissions", + help="Check for submissions that exist in the database but not the filesystem.", + ) + check_db_disconnect_subp.set_defaults(func=check_for_disconnected_db_submissions) + + +def add_check_fs_disconnect_parser(subps): + check_fs_disconnect_subp = subps.add_parser( + "check-disconnected-fs-submissions", + help="Check for submissions that exist in the filesystem but not in the database.", + ) + check_fs_disconnect_subp.set_defaults(func=check_for_disconnected_fs_submissions) + + +def add_delete_db_disconnect_parser(subps): + delete_db_disconnect_subp = subps.add_parser( + "delete-disconnected-db-submissions", + help="Delete submissions that exist in the database but not the filesystem.", + ) + delete_db_disconnect_subp.set_defaults(func=delete_disconnected_db_submissions) + delete_db_disconnect_subp.add_argument( + "--force", action="store_true", help="Do not ask for confirmation." + ) + + +def add_delete_fs_disconnect_parser(subps): + delete_fs_disconnect_subp = subps.add_parser( + "delete-disconnected-fs-submissions", + help="Delete submissions that exist in the filesystem but not the database.", + ) + delete_fs_disconnect_subp.set_defaults(func=delete_disconnected_fs_submissions) + delete_fs_disconnect_subp.add_argument( + "--force", action="store_true", help="Do not ask for confirmation." 
+ ) + + +def add_list_db_disconnect_parser(subps): + list_db_disconnect_subp = subps.add_parser( + "list-disconnected-db-submissions", + help="List submissions that exist in the database but not the filesystem.", + ) + list_db_disconnect_subp.set_defaults(func=list_disconnected_db_submissions) + + +def add_list_fs_disconnect_parser(subps): + list_fs_disconnect_subp = subps.add_parser( + "list-disconnected-fs-submissions", + help="List submissions that exist in the filesystem but not the database.", + ) + list_fs_disconnect_subp.set_defaults(func=list_disconnected_fs_submissions) + + +def add_were_there_submissions_today(subps): + parser = subps.add_parser( + "were-there-submissions-today", + help=("Update the file indicating " "whether submissions were received in the past 24h"), + ) + parser.set_defaults(func=were_there_submissions_today) diff --git a/securedrop/rm.py b/securedrop/rm.py --- a/securedrop/rm.py +++ b/securedrop/rm.py @@ -16,11 +16,95 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # +import errno +import logging +import os import subprocess -def srm(fn): +def shred(path, delete=True): + # type: (str, bool) -> None + """ + Run shred on the file at the given path. + + Args: + path (str): The path to the file to shred. + delete (bool): Whether to unlink the file after shredding. + + Returns: + None + + Raises: + subprocess.CalledProcessError: If shred's return code is not zero. + EnvironmentError: If shred is not available. + """ + + if not os.path.exists(path): + raise EnvironmentError(path) + + if not os.path.isfile(path): + raise ValueError("The shred function only works on files.") + cmd = ["shred", "-z", "-n", "30"] + if delete: + cmd.append("-u") + cmd.append(path) + subprocess.check_call(cmd) + + +def secure_delete(path): # type: (str) -> str - subprocess.check_call(['srm', '-r', fn]) + """ + Securely deletes the file at ``path``. + + Args: + path (str): The path to the file to delete. + + Returns: + str: A string signaling success to rq. + + Raises: + subprocess.CalledProcessError: If shred's return code is not zero. + EnvironmentError: If shred is not available. + """ + path = os.path.abspath(path) + + directories = [] + targets = [] + if not os.path.isdir(path): + targets.append(path) + else: + for directory, subdirs, files in os.walk(path): + directories.append(directory) + directories.extend([os.path.abspath(os.path.join(directory, s)) for s in subdirs]) + for f in files: + targets.append(os.path.abspath(os.path.join(directory, f))) + + for t in targets: + shred(t) + + directories_to_remove = set(directories) + for d in reversed(sorted(directories_to_remove)): + os.rmdir(d) + # We need to return a non-`None` value so the rq worker writes this back to Redis return "success" + + +def check_secure_delete_capability(): + # type: () -> bool + """ + Checks the availability of the program we use for secure deletion. + + Returns: + bool: True if the program is available, otherwise False. 
+ """ + try: + subprocess.check_output(["shred", "--help"]) + return True + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + logging.error("The shred utility is missing.") + except subprocess.CalledProcessError as e: + logging.error("The shred utility is broken: %s %s", e, e.output) + return False diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -20,7 +20,6 @@ from source_app.decorators import ignore_static from source_app.utils import logged_in from store import Storage -from worker import rq_worker_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -76,9 +75,6 @@ def create_app(config): gpg_key_dir=config.GPG_KEY_DIR, ) - app.config['RQ_WORKER_NAME'] = config.RQ_WORKER_NAME - rq_worker_queue.init_app(app) - @app.errorhandler(CSRFError) def handle_csrf_error(e): msg = render_template('session_timeout.html') diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -14,7 +14,7 @@ from secure_tempfile import SecureTemporaryFile -from worker import rq_worker_queue +from worker import create_queue import typing @@ -226,7 +226,7 @@ def rename_submission(self, def async_add_checksum_for_file(db_obj): # type: (Union[Submission, Reply]) -> str - return rq_worker_queue.enqueue( + return create_queue().enqueue( queued_add_checksum_for_file, type(db_obj), db_obj.id, diff --git a/securedrop/worker.py b/securedrop/worker.py --- a/securedrop/worker.py +++ b/securedrop/worker.py @@ -1,40 +1,127 @@ +import logging +import os +from typing import Optional, List + from redis import Redis -from rq import Queue +from rq.queue import Queue +from rq.worker import Worker, WorkerStatus +from rq.exceptions import InvalidJobOperation, NoSuchJobError +from rq.registry import StartedJobRegistry + +from sdconfig import config -class RqWorkerQueue(object): +def create_queue(name=None, timeout=3600): + # type: (str, int) -> Queue + """ + Create an rq ``Queue`` named ``name`` with default timeout ``timeout``. - ''' - A reference to a `rq` worker queue. + If ``name`` is omitted, ``config.RQ_WORKER_NAME`` is used. + """ + if name is None: + name = config.RQ_WORKER_NAME + q = Queue(name=name, connection=Redis(), default_timeout=timeout) + return q - Configuration: - `RQ_WORKER_NAME`: Name of the `rq` worker. - ''' - __EXT_NAME = 'rq-worker-queue' +def rq_workers(queue=None): + # type: (Queue) -> List[Worker] + """ + Returns the list of current rq ``Worker``s. + """ - def __init__(self, app=None): - self.__app = app - if app is not None: - self.init_app(app) + return Worker.all(connection=Redis(), queue=queue) - def init_app(self, app): - self.__app = app - self.__app.config.setdefault('RQ_WORKER_NAME', 'default') +def worker_for_job(job_id): + # type: (str) -> Optional[Worker] + """ + If the job is being run, return its ``Worker``. + """ + for worker in rq_workers(): + # If the worker process no longer exists, skip it. From "man 2 + # kill": "If sig is 0, then no signal is sent, but existence + # and permission checks are still performed; this can be used + # to check for the existence of a process ID or process group + # ID that the caller is permitted to signal." 
try: - # check for presence of existing extension dict - self.__app.extensions - except AttributeError: - self.__app.extensions = {} + os.kill(worker.pid, 0) + except OSError: + continue + + # If it's running and working on the given job, return it. + if worker.state == WorkerStatus.BUSY and job_id == worker.get_current_job_id(): + return worker + return None + - queue_name = self.__app.config['RQ_WORKER_NAME'] - queue = Queue(name=queue_name, connection=Redis(), default_timeout=3600) - self.__app.extensions[self.__EXT_NAME] = queue +def requeue_interrupted_jobs(queue_name=None): + # type: (str) -> None + """ + Requeues jobs found in the given queue's started job registry. - def enqueue(self, *nargs, **kwargs): - queue = self.__app.extensions[self.__EXT_NAME] - return queue.enqueue(*nargs, **kwargs) + Only restarts those that aren't already queued or being run. + When rq starts a job, it records it in the queue's started job + registry. If the server is rebooted before the job completes, the + job is not automatically restarted from the information in the + registry. For tasks like secure deletion of files, this means that + information thought to be deleted is still present in the case of + seizure or compromise. We have manage.py tasks to clean such files + up, but this utility attempts to reduce the need for manual + intervention by automatically resuming interrupted jobs. -rq_worker_queue = RqWorkerQueue() + This function is predicated on a risky assumption: that all jobs + are idempotent. At time of writing, we use rq for securely + deleting submission files and hashing submissions for the ETag + header. Both of these can be safely repeated. If we add rq tasks + that cannot, this function should be improved to omit those. + """ + queue = create_queue(queue_name) + started_job_registry = StartedJobRegistry(queue=queue) + + queued_job_ids = queue.get_job_ids() + logging.debug("queued jobs: {}".format(queued_job_ids)) + started_job_ids = started_job_registry.get_job_ids() + logging.debug("started jobs: {}".format(started_job_ids)) + job_ids = [j for j in started_job_ids if j not in queued_job_ids] + logging.debug("candidate job ids: {}".format(job_ids)) + + if not job_ids: + logging.info("No interrupted jobs found in started job registry.") + + for job_id in job_ids: + logging.debug("Considering job %s", job_id) + try: + job = started_job_registry.job_class.fetch(job_id, started_job_registry.connection) + except NoSuchJobError as e: + logging.error( + "Could not find details for job %s: %s", job_id, e + ) + continue + + logging.debug( + "Job %s enqueued at %s, started at %s", job_id, job.enqueued_at, job.started_at + ) + + worker = worker_for_job(job_id) + if worker: + logging.info( + "Skipping job %s, which is already being run by worker %s", job_id, worker.key + ) + continue + + logging.info("Requeuing job %s", job) + + try: + started_job_registry.remove(job) + except InvalidJobOperation as e: + logging.error("Could not remove job %s from started job registry: %s", job, e) + continue + + try: + queue.enqueue_job(job) + logging.debug("Job now enqueued at %s, started at %s", job.enqueued_at, job.started_at) + except Exception as e: + logging.error("Could not requeue job %s: %s", job, e) + continue
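To make the new deletion path concrete, a small usage sketch against the `rm` module added above (illustrative; it requires GNU coreutils' `shred` on the PATH):

```python
import os
import tempfile

from rm import check_secure_delete_capability, secure_delete

assert check_secure_delete_capability()  # False if shred is missing or broken

workdir = tempfile.mkdtemp()
with open(os.path.join(workdir, 'secret.txt'), 'w') as f:
    f.write('sensitive data')

# shred-overwrites every file under workdir, then removes the now-empty
# directory tree; returns "success" so rq records a non-None result.
assert secure_delete(workdir) == 'success'
assert not os.path.exists(workdir)
```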
diff --git a/molecule/testinfra/staging/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py --- a/molecule/testinfra/staging/app-code/test_redis_worker.py +++ b/molecule/testinfra/staging/app-code/test_redis_worker.py @@ -6,27 +6,34 @@ securedrop_test_vars = pytest.securedrop_test_vars [email protected]('config_line', [ - '[program:securedrop_worker]', - 'command=/opt/venvs/securedrop-app-code/bin/rqworker', - "directory={}".format(securedrop_test_vars.securedrop_code), - 'autostart=true', - 'autorestart=true', - 'startretries=3', - 'stderr_logfile=/var/log/securedrop_worker/err.log', - 'stdout_logfile=/var/log/securedrop_worker/out.log', - "user={}".format(securedrop_test_vars.securedrop_user), -]) [email protected]( + "config_line", + [ + "[program:securedrop_worker]", + "command=/opt/venvs/securedrop-app-code/bin/rqworker", + "directory={}".format(securedrop_test_vars.securedrop_code), + ( + 'environment=PYTHONPATH="/var/www/securedrop:' + '/opt/venvs/securedrop-app-code/lib/python3.5/site-packages"' + ), + "autostart=true", + "autorestart=true", + "startretries=3", + "stderr_logfile=/var/log/securedrop_worker/rqworker.err", + "stdout_logfile=/var/log/securedrop_worker/rqworker.out", + "user={}".format(securedrop_test_vars.securedrop_user), + ], +) def test_redis_worker_configuration(host, config_line): """ Ensure SecureDrop Redis worker config for supervisor service management is configured correctly. """ - f = host.file('/etc/supervisor/conf.d/securedrop_worker.conf') + f = host.file("/etc/supervisor/conf.d/securedrop_worker.conf") # Config lines may have special characters such as [] which will # throw off the regex matching, so let's escape those chars. regex = re.escape(config_line) - assert f.contains('^{}$'.format(regex)) + assert f.contains("^{}$".format(regex)) def test_redis_worker_config_file(host): @@ -37,7 +44,7 @@ def test_redis_worker_config_file(host): Using separate test so that the parametrization doesn't rerun the file mode checks, which would be useless. """ - f = host.file('/etc/supervisor/conf.d/securedrop_worker.conf') + f = host.file("/etc/supervisor/conf.d/securedrop_worker.conf") assert f.is_file assert f.mode == 0o644 assert f.user == "root" diff --git a/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py b/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py @@ -0,0 +1,52 @@ +import pytest +import re + + +testinfra_hosts = ["app-staging"] +securedrop_test_vars = pytest.securedrop_test_vars + + [email protected]( + "config_line", + [ + "[program:securedrop_rqrequeue]", + ( + "command=/opt/venvs/securedrop-app-code/bin/python " + "/var/www/securedrop/scripts/rqrequeue --interval 60" + ), + "directory={}".format(securedrop_test_vars.securedrop_code), + ( + 'environment=PYTHONPATH="/var/www/securedrop:' + '/opt/venvs/securedrop-app-code/lib/python3.5/site-packages"' + ), + "autostart=true", + "autorestart=true", + "startretries=3", + "stderr_logfile=/var/log/securedrop_worker/rqrequeue.err", + "stdout_logfile=/var/log/securedrop_worker/rqrequeue.out", + "user={}".format(securedrop_test_vars.securedrop_user), + ], +) +def test_rqrequeue_configuration(host, config_line): + """ + Ensure Supervisor config for rqrequeue is correct. 
+ """ + f = host.file("/etc/supervisor/conf.d/securedrop_rqrequeue.conf") + # Config lines may have special characters such as [] which will + # throw off the regex matching, so let's escape those chars. + regex = re.escape(config_line) + assert f.contains("^{}$".format(regex)) + + +def test_rqrequeue_config_file(host): + """ + Check ownership and mode of Supervisor config for rqrequeue. + + Using separate test so that the parametrization doesn't rerun + the file mode checks, which would be useless. + """ + f = host.file("/etc/supervisor/conf.d/securedrop_rqrequeue.conf") + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -16,12 +16,12 @@ def test_apache_default_docroot_is_absent(host): @pytest.mark.parametrize('package', [ 'apparmor-utils', + 'coreutils', 'gnupg2', 'haveged', 'python', 'python-pip', 'redis-server', - 'secure-delete', 'sqlite3', 'supervisor', ]) diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -238,7 +238,7 @@ def _start_test_rqworker(config): subprocess.Popen(['rqworker', config.RQ_WORKER_NAME, '-P', config.SECUREDROP_ROOT, '--pid', TEST_WORKER_PIDFILE, - '--logging_level', 'debug', + '--logging_level', 'DEBUG', '-v'], stdout=tmp_logfile, stderr=subprocess.STDOUT) diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1702,7 +1702,7 @@ def test_delete_source_deletes_docs_on_disk(journalist_app, job = journalist_app_module.utils.delete_collection( test_source['filesystem_id']) - # Wait up to 5s to wait for Redis worker `srm` operation to complete + # Wait up to 5s to wait for Redis worker secure deletion to complete utils.asynchronous.wait_for_redis_worker(job) # Encrypted documents no longer exist diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py --- a/securedrop/tests/test_manage.py +++ b/securedrop/tests/test_manage.py @@ -1,20 +1,22 @@ # -*- coding: utf-8 -*- import argparse -import io import datetime +import io import logging import os -import manage -import mock import time -os.environ['SECUREDROP_ENV'] = 'test' # noqa - +import manage +import mock +from management import submissions from models import Journalist, db + from .utils import db_helper +os.environ['SECUREDROP_ENV'] = 'test' # noqa + YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc', 'cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7'] @@ -70,21 +72,14 @@ def test_handle_invalid_secret(journalist_app, config, mocker, capsys): mocker.patch("manage._get_yubikey_usage", return_value=True), mocker.patch("manage.obtain_input", side_effect=YUBIKEY_HOTP), - original_config = manage.config - - try: - # We need to override the config to point at the per-test DB - manage.config = config - + with journalist_app.app_context() as context: # We will try to provide one invalid and one valid secret - return_value = manage._add_user() + return_value = manage._add_user(context=context) out, err = capsys.readouterr() assert return_value == 0 assert 'Try again.' 
in out assert 'successfully added' in out - finally: - manage.config = original_config # Note: we use the `journalist_app` fixture because it creates the DB @@ -98,14 +93,9 @@ def test_exception_handling_when_duplicate_username(journalist_app, mocker.patch("manage._get_last_name", return_value='') mocker.patch("manage._get_yubikey_usage", return_value=False) - original_config = manage.config - - try: - # We need to override the config to point at the per-test DB - manage.config = config - + with journalist_app.app_context() as context: # Inserting the user for the first time should succeed - return_value = manage._add_user() + return_value = manage._add_user(context=context) out, err = capsys.readouterr() assert return_value == 0 @@ -116,8 +106,6 @@ def test_exception_handling_when_duplicate_username(journalist_app, out, err = capsys.readouterr() assert return_value == 1 assert 'ERROR: That username is already taken!' in out - finally: - manage.config = original_config # Note: we use the `journalist_app` fixture because it creates the DB @@ -130,19 +118,12 @@ def test_delete_user(journalist_app, config, mocker): return_value='test-user-56789') mocker.patch('manage._get_delete_confirmation', return_value=True) - original_config = manage.config - - try: - # We need to override the config to point at the per-test DB - manage.config = config - - return_value = manage._add_user() + with journalist_app.app_context() as context: + return_value = manage._add_user(context=context) assert return_value == 0 return_value = manage.delete_user(args=None) assert return_value == 0 - finally: - manage.config = original_config # Note: we use the `journalist_app` fixture because it creates the DB @@ -151,17 +132,11 @@ def test_delete_non_existent_user(journalist_app, config, mocker, capsys): return_value='does-not-exist') mocker.patch('manage._get_delete_confirmation', return_value=True) - original_config = manage.config - - try: - # We need to override the config to point at the per-test DB - manage.config = config - return_value = manage.delete_user(args=None) + with journalist_app.app_context() as context: + return_value = manage.delete_user(args=None, context=context) out, err = capsys.readouterr() assert return_value == 0 assert 'ERROR: That user was not found!' 
in out
-    finally:
-        manage.config = original_config


 def test_get_username_to_delete(mocker):
@@ -175,21 +150,19 @@ def test_reset(journalist_app, test_journo, alembic_config, config):
     try:
         # We need to override the config to point at the per-test DB
         manage.config = config
+        with journalist_app.app_context() as context:
+            # Override the hardcoded alembic.ini value
+            manage.config.TEST_ALEMBIC_INI = alembic_config

-        # Override the hardcoded alembic.ini value
-        manage.config.TEST_ALEMBIC_INI = alembic_config
+            args = argparse.Namespace(store_dir=config.STORE_DIR)
+            return_value = manage.reset(args=args, context=context)

-        args = argparse.Namespace(store_dir=config.STORE_DIR)
-        return_value = manage.reset(args=args)
+            assert return_value == 0
+            assert os.path.exists(config.DATABASE_FILE)
+            assert os.path.exists(config.STORE_DIR)

-        assert return_value == 0
-        assert os.path.exists(config.DATABASE_FILE)
-        assert os.path.exists(config.STORE_DIR)
-
-        # Verify journalist user present in the database is gone
-        with journalist_app.app_context():
-            res = Journalist.query \
-                .filter_by(username=test_journo['username']).one_or_none()
+            # Verify journalist user present in the database is gone
+            res = Journalist.query.filter_by(username=test_journo['username']).one_or_none()
             assert res is None
     finally:
         manage.config = original_config
@@ -245,25 +218,18 @@ def test_clean_tmp_removed(config, caplog):


 def test_were_there_submissions_today(source_app, config):
-    original_config = manage.config
-    try:
+    with source_app.app_context() as context:
         # We need to override the config to point at the per-test DB
-        manage.config = config
         data_root = config.SECUREDROP_DATA_ROOT
-        args = argparse.Namespace(data_root=data_root,
-                                  verbose=logging.DEBUG)
-
-        with source_app.app_context():
-            count_file = os.path.join(data_root, 'submissions_today.txt')
-            source, codename = db_helper.init_source_without_keypair()
-            source.last_updated = (datetime.datetime.utcnow() -
-                                   datetime.timedelta(hours=24*2))
-            db.session.commit()
-            manage.were_there_submissions_today(args)
-            assert io.open(count_file).read() == "0"
-            source.last_updated = datetime.datetime.utcnow()
-            db.session.commit()
-            manage.were_there_submissions_today(args)
-            assert io.open(count_file).read() == "1"
-    finally:
-        manage.config = original_config
+        args = argparse.Namespace(data_root=data_root, verbose=logging.DEBUG)
+
+        count_file = os.path.join(data_root, 'submissions_today.txt')
+        source, codename = db_helper.init_source_without_keypair()
+        source.last_updated = (datetime.datetime.utcnow() - datetime.timedelta(hours=24*2))
+        db.session.commit()
+        submissions.were_there_submissions_today(args, context)
+        assert io.open(count_file).read() == "0"
+        source.last_updated = datetime.datetime.utcnow()
+        db.session.commit()
+        submissions.were_there_submissions_today(args, context)
+        assert io.open(count_file).read() == "1"
diff --git a/securedrop/tests/test_rm.py b/securedrop/tests/test_rm.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_rm.py
@@ -0,0 +1,99 @@
+"""
+Test secure deletion utilities in securedrop/rm.py
+"""
+import os
+
+import pytest
+
+import rm
+
+
+def test_secure_delete_capability(config):
+    assert rm.check_secure_delete_capability() is True
+
+    path = os.environ["PATH"]
+    try:
+        os.environ["PATH"] = "{}:{}".format("/bin", config.TEMP_DIR)
+        assert rm.check_secure_delete_capability() is False
+        fakeshred = os.path.join(config.TEMP_DIR, "shred")
+        with open(fakeshred, "w") as f:
+            f.write("#!/bin/bash\nexit 1\n")
+        os.chmod(fakeshred, 
0o700) + assert rm.check_secure_delete_capability() is False + finally: + os.environ["PATH"] = path + + +def test_shred(config): + testfile = "test_shred.txt" + content = "abc123\n" + + # non-existent target should raise an exception + with pytest.raises(EnvironmentError): + rm.shred(os.path.abspath(os.path.join(config.TEMP_DIR, "nonexistentshredtarget"))) + + # a non-file target should raise an exception + d = os.path.abspath(os.path.join(config.TEMP_DIR, "nonexistentshredtarget")) + os.makedirs(d) + with pytest.raises(ValueError): + rm.shred(d) + os.rmdir(d) + + with open(testfile, "w") as f: + f.write(content) + + with open(testfile) as f: + read_content = f.read() + assert read_content == content + + # Shred without deleting, so we can check the new content + rm.shred(testfile, delete=False) + + with open(testfile) as f: + read_content = f.read() + assert read_content != content + + # Shred and delete + rm.shred(testfile) + assert os.path.exists(testfile) is False + + +def test_secure_delete(config): + content = "abc123\n" + testfile = "test_shred.txt" + + # Shred a file + testfile1 = os.path.abspath(os.path.join(config.TEMP_DIR, testfile)) + with open(testfile1, "w") as f: + f.write(content) + + assert os.path.exists(testfile1) + rm.secure_delete(testfile1) + assert os.path.exists(testfile1) is False + + # Shred a directory + testdir = os.path.abspath(os.path.join(config.TEMP_DIR, "shredtest1")) + testsubdir1 = os.path.abspath(os.path.join(testdir, "shredtest1.1")) + testsubdir2 = os.path.abspath(os.path.join(testdir, "shredtest1.2")) + + os.makedirs(testsubdir1) + os.makedirs(testsubdir2) + + testfile1 = os.path.abspath(os.path.join(testdir, testfile)) + with open(testfile1, "w") as f: + f.write(content) + + testfile2 = os.path.abspath(os.path.join(testsubdir1, testfile)) + with open(testfile2, "w") as f: + f.write(content) + + assert os.path.exists(testfile1) + assert os.path.exists(testfile2) + + rm.secure_delete(testdir) + + assert os.path.exists(testfile1) is False + assert os.path.exists(testfile2) is False + assert os.path.exists(testsubdir1) is False + assert os.path.exists(testsubdir2) is False + assert os.path.exists(testdir) is False diff --git a/securedrop/tests/test_submission_cleanup.py b/securedrop/tests/test_submission_cleanup.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_submission_cleanup.py @@ -0,0 +1,66 @@ +import argparse +import os + +from db import db +from management import submissions +from models import Submission + +from tests import utils + + +def test_delete_disconnected_db_submissions(journalist_app, config): + """ + Test that Submission records without corresponding files are deleted. 
+ """ + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + source_id = source.id + + # make two submissions + utils.db_helper.submit(source, 2) + submission_id = source.submissions[0].id + + # remove one submission's file + f1 = os.path.join(config.STORE_DIR, source.filesystem_id, source.submissions[0].filename) + assert os.path.exists(f1) + os.remove(f1) + assert os.path.exists(f1) is False + + # check that the single disconnect is seen + disconnects = submissions.find_disconnected_db_submissions(config.STORE_DIR) + assert len(disconnects) == 1 + assert disconnects[0].filename == source.submissions[0].filename + + # remove the disconnected Submission + args = argparse.Namespace(force=True, store_dir=config.STORE_DIR) + submissions.delete_disconnected_db_submissions(args) + + assert db.session.query(Submission).filter(Submission.id == submission_id).count() == 0 + assert db.session.query(Submission).filter(Submission.source_id == source_id).count() == 1 + + +def test_delete_disconnected_fs_submissions(journalist_app, config): + """ + Test that files in the store without corresponding Submission records are deleted. + """ + source, codename = utils.db_helper.init_source() + + # make two submissions + utils.db_helper.submit(source, 2) + source_filesystem_id = source.filesystem_id + submission_filename = source.submissions[0].filename + disconnect_path = os.path.join(config.STORE_DIR, source_filesystem_id, submission_filename) + + # delete the first Submission record + db.session.delete(source.submissions[0]) + db.session.commit() + + disconnects = submissions.find_disconnected_fs_submissions(config.STORE_DIR) + assert len(disconnects) == 1 + assert disconnects[0] == disconnect_path + assert os.path.exists(disconnect_path) + + args = argparse.Namespace(force=True, store_dir=config.STORE_DIR) + submissions.delete_disconnected_fs_submissions(args) + + assert os.path.exists(disconnect_path) is False diff --git a/securedrop/tests/test_worker.py b/securedrop/tests/test_worker.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_worker.py @@ -0,0 +1,168 @@ +import logging +import os +import signal +import subprocess +import time + +import worker +from rq.worker import WorkerStatus + + +def layabout(): + """ + Function that just sleeps for an hour. + """ + time.sleep(3600) + + +def start_rq_worker(config, queue_name=None): + """ + Launches an rq worker process. + """ + if queue_name is None: + queue_name = config.RQ_WORKER_NAME + return subprocess.Popen( + [ + "/opt/venvs/securedrop-app-code/bin/rqworker", + "--path", + config.SECUREDROP_ROOT, + queue_name + ], + preexec_fn=os.setsid + ) + + +def test_no_interrupted_jobs(caplog): + """ + Tests requeue_interrupted_jobs when there are no interrupted jobs. + """ + caplog.set_level(logging.DEBUG) + + q = worker.create_queue() + try: + assert len(q.get_job_ids()) == 0 + worker.requeue_interrupted_jobs() + assert "No interrupted jobs found in started job registry." in caplog.text + finally: + q.delete() + + +def test_job_interruption(config, caplog): + """ + Tests that a job is requeued unless it is already being run. 
+    """
+    caplog.set_level(logging.DEBUG)
+
+    queue_name = "test_job_interruption"
+    q = worker_process = None
+    try:
+        q = worker.create_queue(queue_name)
+
+        # submit a job that sleeps for an hour
+        job = q.enqueue(layabout)
+        assert len(q.get_job_ids()) == 1
+
+        # launch worker processes
+        worker_process = start_rq_worker(config, queue_name)
+
+        i = 0
+        while i < 20:
+            if len(worker.rq_workers(q)) == 1:
+                break
+            i += 1
+            time.sleep(0.1)
+
+        assert len(worker.rq_workers(q)) == 1
+
+        i = 0
+        while i < 20:
+            w = worker.worker_for_job(job.id)
+            if w:
+                break
+            i += 1
+            time.sleep(0.1)
+        assert w is not None
+
+        # the running job should not be requeued
+        worker.requeue_interrupted_jobs(queue_name)
+        skipped = "Skipping job {}, which is already being run by worker {}".format(job.id, w.key)
+        assert skipped in caplog.text
+
+        # kill the process group, to kill the worker and its workhorse
+        os.killpg(worker_process.pid, signal.SIGKILL)
+        worker_process.wait()
+        caplog.clear()
+
+        # after killing the worker, the interrupted job should be requeued
+        worker.requeue_interrupted_jobs(queue_name)
+        print(caplog.text)
+        assert "Requeuing job {}".format(job) in caplog.text
+        assert len(q.get_job_ids()) == 1
+    finally:
+        q.delete()
+        if worker_process:
+            try:
+                os.killpg(worker_process.pid, 0)
+                os.killpg(worker_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("worker_process already gone.")
+
+
+def test_worker_for_job(config):
+    """
+    Tests that worker_for_job works when there are multiple workers.
+    """
+
+    queue_name = "test_worker_for_job"
+    q = worker_process = second_process = None
+    try:
+        q = worker.create_queue(queue_name)
+        assert len(worker.rq_workers(q)) == 0
+
+        # launch worker processes
+        worker_process = start_rq_worker(config, queue_name)
+        second_process = start_rq_worker(config, queue_name)
+
+        i = 0
+        while i < 20:
+            if len(worker.rq_workers(q)) == 2:
+                break
+            i += 1
+            time.sleep(0.1)
+
+        assert len(worker.rq_workers(q)) == 2
+
+        worker.rq_workers(q)[0].set_state(WorkerStatus.SUSPENDED)
+
+        logging.debug(
+            [
+                "{}: state={}, job={}".format(w.pid, w.get_state(), w.get_current_job_id())
+                for w in worker.rq_workers(q)
+            ]
+        )
+
+        # submit a job that sleeps for an hour
+        job = q.enqueue(layabout)
+
+        i = 0
+        while i < 20:
+            w = worker.worker_for_job(job.id)
+            if w:
+                break
+            i += 1
+            time.sleep(0.1)
+        assert w is not None
+
+    finally:
+        q.delete()
+        if worker_process:
+            try:
+                os.killpg(worker_process.pid, 0)
+                os.killpg(worker_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("worker_process already gone.")
+
+        if second_process:
+            try:
+                os.killpg(second_process.pid, 0)
+                os.killpg(second_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("second_process already gone.")
Need a tool to resolve inconsistencies between submission state in database and filesystem

We have several problems involving inconsistent submission state between the database and the filesystem, largely due to the time `srm` takes, though inconsistencies can also arise from hardware failures. One is tracked in #4711 -- queued removal jobs are lost on reboot. Another is that jobs are limited to an hour of runtime, so removal of large files can be interrupted before completion -- though the _observed_ behavior noted in freedomofpress/securedrop-temp#1 is that `srm` can complete despite the job failing. We shouldn't trust this happy-but-unexpected outcome.

Either case could result in a submission being deleted from the database but still present on disk. This scenario is dangerous from a security standpoint, since data the organization believes to be gone is in fact still available to attackers or authorities. It also poses operational problems: the large files the system failed to delete add significant time to backup and restore operations. Conversely, in a disaster recovery scenario the database could be restored while some submission files cannot be, which will cause unexpected errors in the user interface.

It would be good to have a tool that can report inconsistencies between the database and the filesystem, and optionally reconcile them, removing files that have no associated database record, or removing database submission records for which no file can be found.
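To make the desired behavior concrete, here is a minimal sketch of the database-side half of such a check. It assumes SecureDrop's `Submission` model and the flat `<store>/<filesystem_id>/<filename>` storage layout, and it is illustrative only, not a finished implementation:

```python
import os

from db import db
from models import Submission


def find_disconnected_db_submissions(store_dir):
    """Return Submission records whose file no longer exists under store_dir."""
    # Collect every filename currently present in the store.
    files_on_disk = set()
    for _directory, _subdirs, files in os.walk(store_dir):
        files_on_disk.update(files)

    # A submission is "disconnected" if its filename was never seen on disk.
    # (Requires an application context, as manage.py commands have.)
    return [s for s in db.session.query(Submission).all()
            if s.filename not in files_on_disk]
```

The filesystem-side half is the mirror image: walk the store and report any file whose name has no matching database record. Optional `delete-*` subcommands could then reconcile each direction after prompting for confirmation.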
2019-08-27T20:21:08Z
[]
[]
freedomofpress/securedrop
4,720
freedomofpress__securedrop-4720
[ "4677" ]
549a056aa222954d6ee65a45f6ca3d479ee83188
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -446,6 +446,25 @@ def update_config(self): self.save() self.validate_gpg_keys() self.validate_journalist_alert_email() + self.validate_https_and_v3() + return True + + def validate_https_and_v3(self): + """ + Checks if https is enabled with v3 onion service. + + :returns: False if both v3 and https enabled, True otherwise. + """ + warning_msg = ("You have configured HTTPS on your source interface " + "and v3 onion services. " + "IMPORTANT: Ensure that you update your certificate " + "to include your v3 source URL before advertising " + "it to sources! ") + + if self.config.get("v3_onion_services", False) and \ + self.config.get("securedrop_app_https_certificate_cert_src"): + print(warning_msg) + return False return True def check_for_v2_onion(self):
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -1042,3 +1042,22 @@ def test_find_or_generate_new_torv3_keys_subsequent_run(tmpdir, capsys): v3_onion_service_keys = json.load(f) assert v3_onion_service_keys == old_keys + + +def test_v3_and_https_cert_message(tmpdir, capsys): + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.config = {"v3_onion_services": False, + "securedrop_app_https_certificate_cert_src": "ab.crt"} # noqa: E501 + # This should return True as v3 is not setup + assert site_config.validate_https_and_v3() + + # This should return False as v3 and https are both setup + site_config.config.update({"v3_onion_services": True}) + assert not site_config.validate_https_and_v3() + + # This should return True as https is not setup + site_config.config.update({"securedrop_app_https_certificate_cert_src": ""}) # noqa: E501 + assert site_config.validate_https_and_v3()
[v3 onion migration] warn user if they enable v3 and have https enabled

When a user runs `securedrop-admin sdconfig` with `v3_onion_services=True`, we should:

1. (minimum) If HTTPS is also enabled, print a warning, but do not prevent the writing of the config. The user should be warned that this change will apply their configured cert to both their v2 and v3 interfaces, and that until they have a v3 cert, they should not share or advertise their v3 source URL with sources.
2. (better) Inspect the names in the configured cert and warn the user only:
   - when the cert hasn't been updated to include their v3 URLs (predicated on the user having enabled v3 once already), OR
   - when the v3 URLs don't exist locally (since this means they are enabling v3 onion services for the first time, and have to run `securedrop-admin install` at least once in order to get the new URLs).
Good news: I have found a cert issued by DigiCert with a v3 onion URL (and a v2 onion URL) included in the subject alternative name field: https://crt.sh/?id=351449246. When using the v3 cert, one sees a little onion and padlock icon in Tor Browser:

![Screen Shot 2019-08-22 at 12 48 33 PM](https://user-images.githubusercontent.com/7832803/63533723-600f2d00-c4db-11e9-9840-42cd84c9d899.png)

So for the "better" implementation of this ticket we can:

1. parse the cert,
2. look in the common name and subject alternative name fields,
3. alert the user if either the v2 OR the v3 URL is missing (a sketch of this check follows below)

Oh yeah, and there's also Cloudflare's https://dns4torpnlfs2ifuz2s2yf3fc7rdmsbhm6rw75euj35pac6ap25zgqad.onion/ (also a DigiCert cert)
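As noted above, here is a rough sketch of steps 1 and 2. It assumes the `cryptography` package is available (it is not necessarily a `securedrop-admin` dependency), and is illustrative rather than a finished implementation:

```python
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import NameOID


def onion_names_in_cert(cert_path):
    """Return the .onion names found in the cert's CN and SAN fields."""
    with open(cert_path, "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())

    names = set()
    # Common name
    for attr in cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME):
        names.add(attr.value)
    # Subject alternative names
    try:
        san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
        names.update(san.value.get_values_for_type(x509.DNSName))
    except x509.ExtensionNotFound:
        pass

    return {n for n in names if n.endswith(".onion")}
```

Step 3 then reduces to checking that the configured v2 and v3 hostnames both appear in the returned set, and warning if either is missing.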
2019-08-29T18:45:27Z
[]
[]
freedomofpress/securedrop
4,735
freedomofpress__securedrop-4735
[ "4734" ]
ae0ff0c36b80062097826852b02f667243c45a16
diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py --- a/securedrop/management/submissions.py +++ b/securedrop/management/submissions.py @@ -9,11 +9,14 @@ from db import db from rm import secure_delete -from models import Source, Submission +from models import Reply, Source, Submission from management import app_context def find_disconnected_db_submissions(path): + """ + Finds Submission records whose file does not exist. + """ submissions = db.session.query(Submission).order_by(Submission.id, Submission.filename).all() files_in_fs = {} @@ -50,7 +53,7 @@ def list_disconnected_db_submissions(args): if disconnected_submissions: print( 'Run "manage.py delete-disconnected-db-submissions" to delete these records.', - file=sys.stderr + file=sys.stderr, ) for s in disconnected_submissions: print(s.id) @@ -78,8 +81,14 @@ def delete_disconnected_db_submissions(args): def find_disconnected_fs_submissions(path): + """ + Finds files in the store that lack a Submission or Reply record. + """ submissions = Submission.query.order_by(Submission.id, Submission.filename).all() - files_in_db = {s.filename: s for s in submissions} + files_in_db = {s.filename: True for s in submissions} + + replies = Reply.query.order_by(Reply.id, Reply.filename).all() + files_in_db.update({r.filename: True for r in replies}) files_in_fs = {} for directory, subdirs, files in os.walk(path): @@ -99,7 +108,7 @@ def find_disconnected_fs_submissions(path): def check_for_disconnected_fs_submissions(args): """ - Check for files without a corresponding Submission record in the database. + Check for files without a corresponding Submission or Reply record in the database. """ with app_context(): disconnected = find_disconnected_fs_submissions(args.store_dir) @@ -114,14 +123,14 @@ def check_for_disconnected_fs_submissions(args): def list_disconnected_fs_submissions(args): """ - List files without a corresponding Submission record in the database. + List files without a corresponding Submission or Reply record in the database. """ with app_context(): disconnected_files = find_disconnected_fs_submissions(args.store_dir) if disconnected_files: print( 'Run "manage.py delete-disconnected-fs-submissions" to delete these files.', - file=sys.stderr + file=sys.stderr, ) for f in disconnected_files: print(f)
diff --git a/securedrop/tests/test_submission_cleanup.py b/securedrop/tests/test_submission_cleanup.py --- a/securedrop/tests/test_submission_cleanup.py +++ b/securedrop/tests/test_submission_cleanup.py @@ -13,7 +13,7 @@ def test_delete_disconnected_db_submissions(journalist_app, config): Test that Submission records without corresponding files are deleted. """ with journalist_app.app_context(): - source, codename = utils.db_helper.init_source() + source, _ = utils.db_helper.init_source() source_id = source.id # make two submissions @@ -43,7 +43,7 @@ def test_delete_disconnected_fs_submissions(journalist_app, config): """ Test that files in the store without corresponding Submission records are deleted. """ - source, codename = utils.db_helper.init_source() + source, _ = utils.db_helper.init_source() # make two submissions utils.db_helper.submit(source, 2) @@ -51,6 +51,11 @@ def test_delete_disconnected_fs_submissions(journalist_app, config): submission_filename = source.submissions[0].filename disconnect_path = os.path.join(config.STORE_DIR, source_filesystem_id, submission_filename) + # make two replies, to make sure that their files are not seen + # as disconnects + journalist, _ = utils.db_helper.init_journalist("Mary", "Lane") + utils.db_helper.reply(journalist, source, 2) + # delete the first Submission record db.session.delete(source.submissions[0]) db.session.commit()
New submission cleanup could delete valid files of replies

## Description

In `securedrop/management/submissions.py`, the `find_disconnected_fs_submissions` function returns any file found in the store that is not associated with a `Submission` record in the database. It should only return files that have neither a `Submission` nor a `Reply` record, so that admins are not warned about, and do not end up deleting, legitimate reply files.

## Steps to Reproduce

- Run `make dev`
- In another shell:
  - Run `docker container ls` to get the dev container's ID
  - Run `docker exec -it "container-id" bash`
  - Run `./manage.py list-disconnected-fs-submissions`

## Expected Behavior

With a consistent database and filesystem, no disconnects should be reported.

## Actual Behavior

The files of all replies in the database are reported as candidates for cleanup.

## Comments

The function simply needs to build the set of valid files from both submissions and replies, and check the files under the store against that complete set. A minimal sketch of the idea follows.
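As a sketch only (the real fix is the patch above in `management/submissions.py`; names here are illustrative):

```python
from models import Reply, Submission


def connected_filenames():
    """The set of filenames that legitimately belong in the store."""
    # Files of submissions...
    names = {s.filename for s in Submission.query.all()}
    # ...and of journalist replies, which must not be flagged as disconnects.
    names.update(r.filename for r in Reply.query.all())
    return names
```

Any file found while walking the store whose name is not in this set is a true disconnect.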
2019-09-03T21:42:21Z
[]
[]
freedomofpress/securedrop
4,835
freedomofpress__securedrop-4835
[ "4724" ]
e3ae6d3ddc3c055cade99138422f4ff127e69e45
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.14.0' +version = '1.0.0' # The full version, including alpha/beta/rc tags. -release = '0.14.0' +release = '1.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '1.0.0~rc4' +__version__ = '1.0.0' diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setuptools.setup( name="securedrop-app-code", - version="1.0.0~rc4", + version="1.0.0", author="Freedom of the Press Foundation", author_email="[email protected]", description="SecureDrop Server",
diff --git a/molecule/builder-xenial/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml --- a/molecule/builder-xenial/tests/vars.yml +++ b/molecule/builder-xenial/tests/vars.yml @@ -1,5 +1,5 @@ --- -securedrop_version: "1.0.0~rc4" +securedrop_version: "1.0.0" ossec_version: "3.0.0" keyring_version: "0.1.3" config_version: "0.1.3"
Release SecureDrop 1.0.0 This is a tracking issue for the upcoming release of SecureDrop 1.0.0 - tasks may get added or modified. **String and feature freeze:** 2019-08-29 (1700 PDT) **String comment period:** 2019-08-29 (1700 PDT) to 2019-09-02 (2000 PDT) **Feature freeze:** 2019-08-30 (1700 PDT) (release branch will be cut AM Eastern time on September 3rd) **Translation period:** 2019-09-03 (1700 PDT) to 2019-09-15 (1700 PDT) **Pre-release announcement:** 2019-09-10 **Translation freeze:** 2019-09-15 (1700 PDT) **Release date:** 2019-09-17 **Release manager:** @kushaldas **Deputy release manager:** @emkll **Localization manager:** @rmol **Deputy localization manager:** @redshiftzero _SecureDrop maintainers and testers:_ As you QA 1.0.0, please report back your testing results as comments on this ticket. File GitHub issues for any problems found, tag them "QA: Release", and associate them with the 1.0.0 milestone for tracking (or ask a maintainer to do so). Test debian packages will be posted on https://apt-test.freedom.press signed with [the test key](https://gist.githubusercontent.com/conorsch/ec4008b111bc3142fca522693f3cce7e/raw/2968621e8ad92db4505a31fcc5776422d7d26729/apt-test%2520apt%2520pubkey). An Ansible playbook testing the upgrade path is [here](https://gist.github.com/conorsch/e7556624df59b2a0f8b81f7c0c4f9b7d). # [QA Matrix for 1.0.0](https://docs.google.com/spreadsheets/d/1mZMwG2Bn_MaRJMYaoyRKt7S5Q9ZOOvitdW2TIMPDQu0/edit#gid=0) # [Test Plan for 1.0.0](https://github.com/freedomofpress/securedrop/wiki/1.0.0-Test-Plan) # Prepare release candidate (1.0.0~rc1) - [x] Prepare 1.0.0-rc1 release changelog - @kushaldas and @redshiftzero - [x] Branch off release/1.0.0 - @redshiftzero - [x] Prepare 1.0.0~rc1 - @redshiftzero - [x] Build debs and put up `1.0.0~rc1` on test apt server (this is now done via a PR into [this repository](https://github.com/freedomofpress/securedrop-dev-packages-lfs)) - @emkll # Other pre-release tasks - [x] Prepare and distribute pre-release messaging - @eloquence # Prepare release candidate (1.0.0~rc2) - [x] Prepare 1.0.0-rc2 release changelog - @kushaldas - [x] Prepare 1.0.0~rc2 - @kushaldas - [x] Build debs and put up `1.0.0~rc2` on test apt server - @kushaldas *Note:* For SecureDrop 1.0.0, we will cut at _least_ two release candidates. Additional release candidates may follow if issues are found in rc2. # Prepare release candidate (1.0.0~rc3) - [x] Prepare 1.0.0-rc3 release changelog - @emkll - [x] Prepare 1.0.0~rc3 - @kushaldas - [x] Build debs and put up `1.0.0~rc3` on test apt server - @kushaldas After each test, please update the QA matrix and post details for Basic Server Testing, Application Acceptance Testing and 1.0.0-specific testing below in comments to this ticket. 
# Final release - [x] Ensure builder in release branch is updated and/or update builder image - @emkll - [x] Merge final translations - @rmol - [x] Push signed tag - @emkll - [x] Build final Debian packages for 1.0.0 - @conorsch - [ ] Upload package build logs to wiki - @conorsch - [x] Upload Debian packages to apt QA server - @conorsch - [x] Pre-Flight: Test install and upgrade (both cron-apt on Trusty, and Ansible on Xenial) of 1.0.0 works w/ prod repo debs, test updater logic in Tails - @zenmonkeykstop, @emkll, @rmol - [x] Prepare and distribute release messaging - @eloquence # Post release - [x] Merge changelog back to `develop` @kushaldas https://github.com/freedomofpress/securedrop/pull/4840 - [x] Bump version on `develop` in prep for 1.1.0 release @kushaldas https://github.com/freedomofpress/securedrop/pull/4840 - [x] Update upgrade testing boxes @emkll https://github.com/freedomofpress/securedrop/pull/4857 - [x] Update roadmap wiki page @emkll
## Clean install - VMs with V3 onion URLs (Complete) ### Environment - Install target: VMs - Tails version: 3.16 - Test Scenario: Clean install - SSH over Tor: Yes - Onion service version: V3 - Release candidate: RC2 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [x] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1mZMwG2Bn_MaRJMYaoyRKt7S5Q9ZOOvitdW2TIMPDQu0/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [ ] If doing upgrade testing, make a backup on 0.14.0 and restore this backup on 1.0.0 - [x] "Send Test OSSEC Alert" button in the journalist triggers an OSSEC alert and an email is sent. ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 1.0.0-specific changes Note that it is not expected that a single tester test each one of the Tor onion services scenarios, please just indicate which scenarios you covered in the comment on the release ticket and the row at the end of the QA matrix (please fill the QA matrix in as you begin QA such that work is not duplicated). From a 1.0.0 install: #### Tor onion services: upgrade to v2 - [ ] Do not rerun `./securedrop-admin sdconfig`. 
Using the same `site-specific` as from before your upgrade to 1.0.0, run `./securedrop-admin install`. V2 should still be enabled, and v3 should not be enabled.

#### Tor onion services: upgrade to v2+v3

Precondition:

* Save the site-specific from v2 only. This will be used in a test towards the end of this section.
* Perform a backup on v2.

- [ ] rerun `./securedrop-admin sdconfig` to enable v2 and v3 onion services, and then do `./securedrop-admin install`. Then run `securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have working v3 onion addresses.
- [ ] Now disable SSH over Tor, rerun `securedrop-admin install` and `./securedrop-admin tailsconfig`:
- [ ] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts
- [ ] Verify that `ssh app` and `ssh mon` work as expected.
- [ ] Use `make self-signed-https-certs` to generate self-signed certificates for testing, and run `./securedrop-admin sdconfig` enabling HTTPS:
- [ ] Verify that a warning is shown to the user indicating that they should update their certificate prior to sharing their v3 onion URL with users.
- [ ] Test multi-admin behavior. Conduct this test step after v3 is enabled on the server:
- [ ] Back up `site-specific`, and copy the version from before the upgrade into place instead. Re-run `./securedrop-admin install`, and verify that it fails with a user-friendly message, due to `v3_onion_services=False`.
- [ ] Restore the v3 `site-specific` and move the `tor_v3_keys.json` file out of `install_files/ansible-base`. Re-run `./securedrop-admin install` and verify that it fails with a message due to the missing keys file.
- [ ] Restore the backup from v2. The v3 onions should not be disabled.
- [ ] Now run the backup and restore again to a new v2+v3 install. The v3 onions should be enabled.

#### Tor onion service: v3 only, no HTTPS

- [x] rerun `./securedrop-admin sdconfig` to enable only v3 onion services, and then do `./securedrop-admin install`. Now run `./securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have working v3 onion addresses. (#4708, #4677)
- [x] Now disable SSH over Tor, and re-run `./securedrop-admin install`:
- [x] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts
- [x] Verify that `ssh app` and `ssh mon` work as expected.
- [x] Run backup and restore to a new install. The v3 onions should be enabled.

:exclamation: Note: I don't recall previous install runs throwing a timeout when running `./securedrop-admin install` to disable SSH over Tor, but this run did:
```
RUNNING HANDLER [tor-hidden-services : Waiting for SSH connection (slow)...] ***********************************
fatal: [mon -> localhost]: FAILED! => {"changed": false, "elapsed": 300, "msg": "Timeout when waiting for search string OpenSSH in myawesomev3onionurl1.onion:22"}
fatal: [app -> localhost]: FAILED! => {"changed": false, "elapsed": 301, "msg": "Timeout when waiting for search string OpenSSH in myawesomev3onionurl2.onion:22"}
```

#### Tor onion service: adding v3 interfaces with SSH over LAN

- [ ] From a v2-only instance using SSH over LAN, upgrade to v3 only. You should continue to be able to SSH over LAN, and the v2 and v3 source and journalist interfaces should be available.

#### Deletion functionality

- [ ] Upgrade test: Prior to upgrading to 1.0.0, run the QA loader: https://docs.securedrop.org/en/release-0.14.0/development/database_migrations.html#release-testing-migrations.
Then after upgrade ensure that there are no submissions with either NULL sources or sources that do not exist in the database any longer. The corresponding files on disk should also be gone.

#### Testing detection and correction of disconnected submissions

Visit the source interface and send two messages.

First we'll test a disconnected database record. In your `www-data` shell:

```
cd /var/lib/securedrop/store
ls -laR
```

You should see the two message files. Remove one with rm.

```
cd /var/www/securedrop
```

- [x] `./manage.py check-disconnected-db-submissions` should report `There are submissions in the database with no corresponding files. Run "manage.py list-disconnected-db-submissions" for details.`
- [x] `./manage.py list-disconnected-db-submissions` should list the ID of the deleted submission, e.g. 2.
- [x] `./manage.py delete-disconnected-db-submissions` should prompt you with `Enter 'y' to delete all submissions missing files:` -- reply y and you should see `Removing submission IDs [2]...` (the ID may differ).

Now we'll delete the remaining database record and verify that its disconnected file is detected. Still in your `www-data` shell:

```
sqlite3 /var/lib/securedrop/db.sqlite
```

Delete the submission record for the remaining message (substitute your filename):

```
delete from submissions where filename = '1-exhausted_overmantel-msg.gpg';
```

- [x] `./manage.py check-disconnected-fs-submissions` should report `There are files in the submission area with no corresponding records in the database. Run "manage.py list-disconnected-fs-submissions" for details.`
- [x] `./manage.py list-disconnected-fs-submissions` should show a list like: `/var/lib/securedrop/store/B3A5GPU4OHPQK736R76HKJUP5VONIOMKZLXK77GPTGNW7EJ63AY5YBX27P3DB2X4DZBXPX3LGBBXAJZYG3HQRHE4B6UE5YYBPGDYZOA=/1-exhausted_overmantel-msg.gpg`
- [x] `./manage.py delete-disconnected-fs-submissions` should prompt you to delete that file. Do so and it should be deleted.

#### Testing automatic requeuing of interrupted deletions

Establish two SSH connections to the app server. In one, become root with `sudo su -` and in the other become www-data with `sudo -u www-data bash`.

In the `www-data` shell:

Activate the securedrop-app-code virtualenv: `. /opt/venvs/securedrop-app-code/bin/activate`

`cd /var/www/securedrop`

Create a big file that will take a while to delete: `dd if=/dev/zero of=/var/lib/securedrop/store/bigfile bs=1M count=1000`

Submit a job to delete it:

```
python3
>>> import rm
>>> import worker
>>> q = worker.create_queue()
>>> q.enqueue(rm.secure_delete, "/var/lib/securedrop/store/bigfile")
```

Exit Python.

In the root shell:

Reboot, then reconnect. Look at the rqrequeue log: less /var/log/securedrop_worker/rqrequeue.err -- at the end you should see lines like this:

```
2019-08-08 17:31:01,118 INFO Running every 60 seconds.
2019-08-08 17:31:01,141 INFO Requeuing job <Job 1082e71f-7581-448c-b84b-027e55b4ef8e: rm.secure_delete('/var/lib/securedrop/store/bigfile')>
2019-08-08 17:32:01,192 INFO Skipping job 1082e71f-7581-448c-b84b-027e55b4ef8e, which is already being run by worker rq:worker:6a6b548310f948e291fa954743b8094f
```

That indicates the interrupted job was found and restarted, but was left alone at the next check because it was already running.

The job should run to completion, `/var/lib/securedrop/store/bigfile` should be deleted, and the rqrequeue log should start saying:

```
2019-08-08 17:33:01,253 INFO No interrupted jobs found in started job registry.
``` - [x] Verified the requeue behavior #### Testing OSSEC reporting of disconnects Create a file under `/var/lib/securedrop/store` with `touch /var/lib/securedrop/store/testfile`. If you don't feel like waiting a day for the OSSEC report, you can edit `/var/ossec/etc/ossec.conf`, look for `check-disconnect`, and reduce the `<frequency>`, then `service ossec restart`. - [x] An OSSEC alert was sent indicating a disconnect had occurred. :exclamation: The alert is Level1, perhaps we should consider changing the level? - [x] I otherwise got no manage.py notifications about this functionality. #### Miscellaneous other changes - [x] Python 2 should not be used anywhere on the system. Inspect the version of python that is used by running `ps -aux | grep python` and verify that `/opt/venvs/securedrop-app-code/bin/python` is used instead of `/usr/bin/python`. :exclamation: supervisor uses system python but that's because it's managed upstream: ``` root 1295 0.1 2.5 59068 12416 ? Ss 15:47 0:10 /usr/bin/python /usr/bin/supervisord -n -c /etc/supervisor/supervisord.conf www-data 1502 0.0 3.1 70864 15548 ? S 15:47 0:02 /opt/venvs/securedrop-app-code/bin/python /opt/venvs/securedrop-app-code/bin/rqworker www-data 1503 0.1 2.9 70884 14452 ? S 15:47 0:10 /opt/venvs/securedrop-app-code/bin/python /var/www/securedrop/scripts/rqrequeue --interval 60 vagrant 10038 0.0 0.1 11284 896 pts/1 S+ 17:55 0:00 grep --color=auto python ``` - [ ] Journalist notifications continue to work as expected on 1.0.0. - [x] Check that both app and mon servers are running Tor version `0.4.x` (#4658). - [x] Login as a journalist, and via another admin account reset the password of the journalist, this should invalidate the journalist session, and the journalist must relogin (#4679) - [x] There are no linux-image generic packages installed (non-grsec kernels) (`apt list --installed | grep linux-image`) (#4641) - [x] Shortly after uploading a file via the Source Interface, a checksum is added to the `submissions` table (managed via `rq`) WIP pre-release messaging here, first comments welcome: https://docs.google.com/document/d/1Rc7Z-WsFZUWjTaDGta2_lz4MBo51Tb4Z49GQPPOSM4I/edit# # QA plan - NUC5s - NUC7s - Mac Minis - 1U servers in SF ## 1.0.0 QA Checklist For both upgrades and fresh installs, here is a list of functionality that requires testing. You can use this for copy/pasting into your QA report. Feel free to edit this message to update the plan as appropriate. If you have submitted a QA report already for a 1.0.0 release candidate with successful basic server testing and application acceptance testing sections, then you can skip these sections in subsequent reports, unless otherwise indicated by the Release Manager. This is to ensure that you focus your QA effort on the 1.0.0-specific changes as well as changes since the previous release candidate. 
### Environment - Install target: NUC5 - Tails version: 3.16 - Test Scenario: - SSH over Tor: No - Onion service version: v2 + v3 - Release candidate: rc2 - General notes: ### Basic Server Testing - [x] I can access both the source and journalist interfaces - [x] I can SSH into both machines over Tor - [x] AppArmor is loaded on app - [x] 0 processes are running unconfined - [x] AppArmor is loaded on mon - [ ] 0 processes are running unconfined - [x] Both servers are running grsec kernels - [x] iptables rules loaded - [x] OSSEC emails begin to flow after install - [x] OSSEC emails are decrypted to correct key and I am able to decrypt them - [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1mZMwG2Bn_MaRJMYaoyRKt7S5Q9ZOOvitdW2TIMPDQu0/edit#gid=0) checks pass #### Command Line User Generation - [x] Can successfully add admin user and login #### Administration - [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - [x] If doing upgrade testing, make a backup on 0.14.0 and restore this backup on 1.0.0 - [x] "Send Test OSSEC Alert" button in the journalist triggers an OSSEC alert and an email is sent. ### Application Acceptance Testing #### Source Interface ##### Landing page base cases - [x] JS warning bar does not appear when using Security Slider high - [x] JS warning bar does appear when using Security Slider Low ##### First submission base cases - [x] On generate page, refreshing codename produces a new 7-word codename - [x] On submit page, empty submissions produce flashed message - [x] On submit page, short message submitted successfully - [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_ before the entire file is uploaded - [x] On submit page, file less than 500 MB submitted successfully ##### Returning source base cases - [x] Nonexistent codename cannot log in - [x] Empty codename cannot log in - [x] Legitimate codename can log in - [x] Returning user can view journalist replies - need to log into journalist interface to test #### Journalist Interface ##### Login base cases - [x] Can log in with 2FA tokens - [x] incorrect password cannot log in - [x] invalid 2fa token cannot log in - [x] 2fa immediate reuse cannot log in ##### Index base cases - [x] Filter by codename works - [x] Starring and unstarring works - [x] Click select all selects all submissions - [x] Selecting all and clicking "Download" works ##### Individual source page - [x] You can submit a reply and a flashed message and new row appears - [x] You cannot submit an empty reply - [x] Clicking "Delete Source And Submissions" and the source and docs are deleted - [x] You can click on a document and successfully decrypt using application private key ### Basic Tails Testing #### Updater GUI After updating to this release candidate and running `securedrop-admin tailsconfig` - [x] The Updater GUI appears on boot - [x] Updating occurs without issue ### 1.0.0-specific changes Note that it is not expected that a single tester test each one of the Tor onion services scenarios, please just indicate which scenarios you covered in the comment on the release ticket and the row at the end of the QA matrix (please fill the QA matrix in as you begin QA such that work is not duplicated). From a 1.0.0 install: #### Tor onion services: upgrade to v2 - [ ] Do not rerun `./securedrop-admin sdconfig`. 
Using the same `site-specific` as from before your upgrade to 1.0.0, run `./securedrop-admin install`. V2 should still be enabled, and v3 should not be enabled.

#### Tor onion services: upgrade to v2+v3

Precondition:

* Save the site-specific from v2 only. This will be used in a test towards the end of this section.
* Perform a backup on v2.

- [x] rerun `./securedrop-admin sdconfig` to enable v2 and v3 onion services, and then do `./securedrop-admin install`. Then run `securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have working v3 onion addresses.
- [ ] Now disable SSH over Tor, rerun `securedrop-admin install` and `./securedrop-admin tailsconfig`:
- [ ] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts
- [ ] Verify that `ssh app` and `ssh mon` work as expected.
- [x] Use `make self-signed-https-certs` to generate self-signed certificates for testing, and run `./securedrop-admin sdconfig` enabling HTTPS:
- [x] Verify that a warning is shown to the user indicating that they should update their certificate prior to sharing their v3 onion URL with users.
- [ ] Test multi-admin behavior. Conduct this test step after v3 is enabled on the server:
- [ ] Back up `site-specific`, and copy the version from before the upgrade into place instead. Re-run `./securedrop-admin install`, and verify that it fails with a user-friendly message, due to `v3_onion_services=False`.
- [x] Restore the v3 `site-specific` and move the `tor_v3_keys.json` file out of `install_files/ansible-base`. Re-run `./securedrop-admin install` and verify that it fails with a message due to the missing keys file.
- [ ] Restore the backup from v2. The v3 onions should not be disabled.
- [x] Now run the backup and restore again to a new v2+v3 install. The v3 onions should be enabled.

#### Tor onion service: v3 only, no HTTPS

- [ ] rerun `./securedrop-admin sdconfig` to enable only v3 onion services, and then do `./securedrop-admin install`. Now run `./securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have working v3 onion addresses. (#4708, #4677)
- [ ] Now disable SSH over Tor, and re-run `./securedrop-admin install`:
- [ ] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts
- [ ] Verify that `ssh app` and `ssh mon` work as expected.
- [ ] Run backup and restore to a new install. The v3 onions should be enabled.

#### Tor onion service: adding v3 interfaces with SSH over LAN

- [x] From a v2-only instance using SSH over LAN, upgrade to v3 only. You should continue to be able to SSH over LAN, and the v2 and v3 source and journalist interfaces should be available.

#### Deletion functionality

- [ ] Upgrade test: Prior to upgrading to 1.0.0, run the QA loader: https://docs.securedrop.org/en/release-0.14.0/development/database_migrations.html#release-testing-migrations. Then after upgrade ensure that there are no submissions with either NULL sources or sources that do not exist in the database any longer. The corresponding files on disk should also be gone.

#### Testing detection and correction of disconnected submissions

Visit the source interface and send two messages.

First we'll test a disconnected database record. In your `www-data` shell:

```
cd /var/lib/securedrop/store
ls -laR
```

You should see the two message files. Remove one with rm.
``` cd /var/www/securedrop ``` - [x] `./manage.py check-disconnected-db-submissions` should report `There are submissions in the database with no corresponding files. Run "manage.py list-disconnected-db-submissions" for details.` - [x] `./manage.py list-disconnected-db-submissions` should list the ID of the deleted submission, e.g. 2. - [x] `./manage.py delete-disconnected-db-submissions` should prompt you with `Enter 'y' to delete all submissions missing files:` -- reply y and you should see `Removing submission IDs [2]...` (the ID may differ). Now we'll delete the remaining database record and verify that its disconnected file is detected. Still in your `www-data` shell: ``` sqlite3 /var/lib/securedrop/db.sqlite ``` Delete the submission record for the remaining message (substitute your filename): ``` delete from submissions where filename = '1-exhausted_overmantel-msg.gpg'; ``` - [x] `./manage.py check-disconnected-fs-submissions` should report `There are files in the submission area with no corresponding records in the database. Run "manage.py list-disconnected-fs-submissions" for details..` - [x] `./manage.py list-disconnected-fs-submissions` should show a list like: `/var/lib/securedrop/store/B3A5GPU4OHPQK736R76HKJUP5VONIOMKZLXK77GPTGNW7EJ63AY5YBX27P3DB2X4DZBXPX3LGBBXAJZYG3HQRHE4B6UE5YYBPGDYZOA=/1-exhausted_overmantel-msg.gpg` - [x] `./manage.py delete-disconnected-fs-submissions` should prompt you to delete that file. Do so and it should be deleted. #### Testing automatic requeuing of interrupted deletions Establish two SSH connections to the app server. In one, become root with `sudo su -` and in the other become www-data with `sudo -u www-data bash`. In the `www-data` shell: Activate the securedrop-app-code virtualenv: `. /opt/venvs/securedrop-app-code/bin/activate` `cd /var/www/securedrop` Create a big file that will take a while to delete: `dd if=/dev/zero of=/var/lib/securedrop/store/bigfile bs=1M count=1000` Submit a job to delete it: ``` python3 >>> import rm >>> import worker >>> q = worker.create_queue() >>> q.enqueue(rm.secure_delete, "/var/lib/securedrop/store/bigfile") ``` Exit Python. In the root shell: Reboot, then reconnect. Look at the rqrequeue log: less /var/log/securedrop_worker/rqrequeue.err -- at the end you should see lines like this: ``` 2019-08-08 17:31:01,118 INFO Running every 60 seconds. 2019-08-08 17:31:01,141 INFO Requeuing job <Job 1082e71f-7581-448c-b84b-027e55b4ef8e: rm.secure_delete('/var/lib/securedrop/store/bigfile')> 2019-08-08 17:32:01,192 INFO Skipping job 1082e71f-7581-448c-b84b-027e55b4ef8e, which is already being run by worker rq:worker:6a6b548310f948e291fa954743b8094f ``` That indicates the interrupted job was found and restarted, but was left alone at the next check because it was already running. The job should run to completion, `/var/lib/securedrop/store/bigfile` should be deleted, and the rqrequeue log should start saying: ``` 2019-08-08 17:33:01,253 INFO No interrupted jobs found in started job registry. ``` - [x] Verified the requeue behavior #### Testing OSSEC reporting of disconnects Create a file under `/var/lib/securedrop/store` with `touch /var/lib/securedrop/store/testfile`. If you don't feel like waiting a day for the OSSEC report, you can edit `/var/ossec/etc/ossec.conf`, look for `check-disconnect`, and reduce the `<frequency>`, then `service ossec restart`. - [ ] An OSSEC alert was sent indicating a disconnect had occurred. - [ ] I otherwise got no manage.py notifications about this functionality. 
#### Miscellaneous other changes

- [x] Python 2 should not be used anywhere on the system. Inspect the version of python that is used by running `ps -aux | grep python` and verify that `/opt/venvs/securedrop-app-code/bin/python` is used instead of `/usr/bin/python`.
- [x] Journalist notifications continue to work as expected on 1.0.0.
- [x] Check that both app and mon servers are running Tor version `0.4.x` (#4658).
- [x] Log in as a journalist, then reset that journalist's password via another admin account; this should invalidate the journalist's session, and the journalist must log in again (#4679)
- [x] There are no linux-image generic packages installed (non-grsec kernels) (`apt list --installed | grep linux-image`) (#4641)
- [x] Shortly after uploading a file via the Source Interface, a checksum is added to the `submissions` table (managed via `rq`)

### Preflight

- [ ] Ensure the builder image is up-to-date on release day

These tests should be performed on the day of release, prior to pushing live Debian packages to apt.freedom.press:

#### Basic testing

- [ ] Install or upgrade occurs without error
- [ ] Source interface is available and version string indicates it is 1.0.0
- [ ] A message can be successfully submitted

#### Tails

- [ ] The updater GUI appears on boot
- [ ] The update successfully occurs to 1.0.0
- [ ] After reboot, updater GUI no longer appears

## 1.0.0 QA Checklist

### Environment

- Tails version: 4.0-beta2
- Test Scenario: Fresh install on prod VMs
- SSH over Tor: On
- Onion service version: v2 only
- Release candidate: 1.0.0-rc2

### Basic Server Testing

- [x] I can access both the source and journalist interfaces
- [ ] I can SSH into both machines over Tor - DID NOT TEST (did most testing via console)
- [x] AppArmor is loaded on app
  - [x] 0 processes are running unconfined
- [x] AppArmor is loaded on mon
  - [x] 0 processes are running unconfined
- [x] Both servers are running grsec kernels
- [x] iptables rules loaded
- [x] OSSEC emails begin to flow after install
- [x] OSSEC emails are decrypted to correct key and I am able to decrypt them
- [ ] [QA Matrix](https://docs.google.com/spreadsheets/d/1mZMwG2Bn_MaRJMYaoyRKt7S5Q9ZOOvitdW2TIMPDQu0/edit#gid=0) checks pass - not true, see QA matrix

#### Command Line User Generation

- [x] Can successfully add admin user and login

#### Administration

- [ ] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html - DID NOT TEST
- [x] "Send Test OSSEC Alert" button in the journalist interface triggers an OSSEC alert and an email is sent.
### Application Acceptance Testing

#### Source Interface

##### Landing page base cases

- [x] JS warning bar does not appear when using Security Slider high
- [x] JS warning bar does appear when using Security Slider Low

##### First submission base cases

- [x] On generate page, refreshing codename produces a new 7-word codename
- [x] On submit page, empty submissions produce flashed message
- [x] On submit page, short message submitted successfully
- [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_, before the entire file is uploaded
- [x] On submit page, file less than 500 MB submitted successfully

##### Returning source base cases

- [x] Nonexistent codename cannot log in
- [x] Empty codename cannot log in
- [x] Legitimate codename can log in
- [x] Returning user can view journalist replies

#### Journalist Interface

##### Login base cases

- [x] Can log in with 2FA tokens
- [x] incorrect password cannot log in
- [x] invalid 2fa token cannot log in
- [x] 2fa immediate reuse cannot log in

##### Index base cases

- [x] Filter by codename works
- [x] Starring and unstarring works
- [x] Click select all selects all submissions
- [x] Selecting all and clicking "Download" works

##### Individual source page

- [x] You can submit a reply; a flashed message and a new row appear
- [x] You cannot submit an empty reply
- [x] Clicking "Delete Source And Submissions" deletes the source and docs
- [x] You can click on a document and successfully decrypt it using the application private key

### Basic Tails Testing

#### Updater GUI

After updating to this release candidate and running `securedrop-admin tailsconfig`:

- [x] The Updater GUI appears on boot
- [ ] Updating occurs without issue - I hit a keyserver timeout

### 1.0.0-specific changes

From a 1.0.0 install:

#### Tor onion services: Fresh install on v2 (used a site-specific from the previous release and did not re-run sdconfig) [I DID NOT TEST]

- [ ] rerun `./securedrop-admin sdconfig` to enable v2 and v3 onion services, and then do `./securedrop-admin install`. Then run `securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have a working v3 onion address.
- [ ] Use `make self-signed-https-certs` to generate self-signed certificates for testing, and run `./securedrop-admin sdconfig` enabling HTTPS:
  - [ ] Verify that a warning is shown to the user indicating that they should update their certificate prior to sharing their v3 onion URL with users.
- [ ] Restore the v3 `site-specific` and move the `tor_v3_keys.json` file out of `install_files/ansible-base`. Re-run `./securedrop-admin install` and verify that it fails with a message due to the missing keys file.

#### Testing detection and correction of disconnected submissions

(also made a reply before all this to test #4734)

- [x] `./manage.py check-disconnected-db-submissions` should report `There are submissions in the database with no corresponding files. Run "manage.py list-disconnected-db-submissions" for details.`
- [x] `./manage.py list-disconnected-db-submissions` should list the ID of the deleted submission, e.g. 2.
- [x] `./manage.py delete-disconnected-db-submissions` should prompt you with `Enter 'y' to delete all submissions missing files:` -- reply y and you should see `Removing submission IDs [2]...` (the ID may differ).
Run "manage.py list-disconnected-fs-submissions" for details..` - [x] `./manage.py list-disconnected-fs-submissions` should show a list like: `/var/lib/securedrop/store/B3A5GPU4OHPQK736R76HKJUP5VONIOMKZLXK77GPTGNW7EJ63AY5YBX27P3DB2X4DZBXPX3LGBBXAJZYG3HQRHE4B6UE5YYBPGDYZOA=/1-exhausted_overmantel-msg.gpg` - [x] `./manage.py delete-disconnected-fs-submissions` should prompt you to delete that file. Do so and it should be deleted. #### Testing automatic requeuing of interrupted deletions - [x] Verified the requeue behavior #### Testing OSSEC reporting of disconnects Create a file under `/var/lib/securedrop/store` with `touch /var/lib/securedrop/store/testfile`. If you don't feel like waiting a day for the OSSEC report, you can edit `/var/ossec/etc/ossec.conf`, look for `check-disconnect`, and reduce the `<frequency>`, then `service ossec restart`. - [x] An OSSEC alert was sent indicating a disconnect had occurred. - [x] I otherwise got no manage.py notifications about this functionality. #### Miscellaneous other changes - [x] Python 2 should not be used anywhere on the system. Inspect the version of python that is used by running `ps -aux | grep python` and verify that `/opt/venvs/securedrop-app-code/bin/python` is used instead of `/usr/bin/python` - true except supervisor issue #4783 - [ ] Journalist notifications continue to work as expected on 1.0.0. - DID NOT TEST - [x] Check that both app and mon servers are running Tor version `0.4.x` (#4658). - [x] Login as a journalist, and via another admin account reset the password of the journalist, this should invalidate the journalist session, and the journalist must relogin (#4679) - [x] There are no linux-image generic packages installed (non-grsec kernels) (`apt list --installed | grep linux-image`) (#4641) - [x] Shortly after uploading a file via the Source Interface, a checksum is added to the `submissions` table (managed via `rq`) Pre-release announcement is live: https://securedrop.org/news/securedrop-100-pre-release-announcement/ Tweeted here: https://twitter.com/SecureDrop/status/1171587448360460288 Redmine bulk distribution completed. # QA plan **IN PROGRESS** - NUC5s - NUC7s - Mac Minis - 1U servers in SF ## 1.0.0 QA Checklist For both upgrades and fresh installs, here is a list of functionality that requires testing. You can use this for copy/pasting into your QA report. Feel free to edit this message to update the plan as appropriate. If you have submitted a QA report already for a 1.0.0 release candidate with successful basic server testing and application acceptance testing sections, then you can skip these sections in subsequent reports, unless otherwise indicated by the Release Manager. This is to ensure that you focus your QA effort on the 1.0.0-specific changes as well as changes since the previous release candidate. 
### Environment

- Install target: Mac Mini
- Tails version: 3.1.6
- Test Scenario: fresh
- SSH over Tor: yes
- Onion service version: v2 initially
- Release candidate: rc2
- General notes:

### Basic Server Testing

- [x] I can access both the source and journalist interfaces
- [x] I can SSH into both machines over Tor
- [x] AppArmor is loaded on app
  - [x] 0 processes are running unconfined
- [x] AppArmor is loaded on mon
  - [x] 0 processes are running unconfined
- [x] Both servers are running grsec kernels
- [x] iptables rules loaded
- [x] OSSEC emails begin to flow after install
- [x] OSSEC emails are decrypted to correct key and I am able to decrypt them
- [x] [QA Matrix](https://docs.google.com/spreadsheets/d/1mZMwG2Bn_MaRJMYaoyRKt7S5Q9ZOOvitdW2TIMPDQu0/edit#gid=0) checks pass

#### Command Line User Generation

- [x] Can successfully add admin user and login

#### Administration

- [x] I have backed up and successfully restored the app server following the documentation here: https://docs.securedrop.org/en/latest/backup_and_restore.html
- [ ] If doing upgrade testing, make a backup on 0.14.0 and restore this backup on 1.0.0
- [x] "Send Test OSSEC Alert" button in the journalist interface triggers an OSSEC alert and an email is sent. **n/a**

### Application Acceptance Testing

#### Source Interface

##### Landing page base cases

- [x] JS warning bar does not appear when using Security Slider high
- [x] JS warning bar does appear when using Security Slider Low

##### First submission base cases

- [x] On generate page, refreshing codename produces a new 7-word codename
- [x] On submit page, empty submissions produce flashed message
- [x] On submit page, short message submitted successfully
- [x] On submit page, file greater than 500 MB produces "The connection was reset" in Tor Browser _quickly_, before the entire file is uploaded
- [x] On submit page, file less than 500 MB submitted successfully

##### Returning source base cases

- [x] Nonexistent codename cannot log in
- [x] Empty codename cannot log in
- [x] Legitimate codename can log in
- [x] Returning user can view journalist replies - need to log into journalist interface to test

#### Journalist Interface

##### Login base cases

- [x] Can log in with 2FA tokens
- [x] incorrect password cannot log in
- [x] invalid 2fa token cannot log in
- [x] 2fa immediate reuse cannot log in

##### Index base cases

- [x] Filter by codename works
- [x] Starring and unstarring works
- [x] Click select all selects all submissions
- [x] Selecting all and clicking "Download" works

##### Individual source page

- [x] You can submit a reply; a flashed message and a new row appear
- [x] You cannot submit an empty reply
- [x] Clicking "Delete Source And Submissions" deletes the source and docs
- [x] You can click on a document and successfully decrypt it using the application private key

### Basic Tails Testing

#### Updater GUI

After updating to this release candidate and running `securedrop-admin tailsconfig`:

- [ ] The Updater GUI appears on boot
- [ ] Updating occurs without issue

### 1.0.0-specific changes

Note that it is not expected that a single tester test each one of the Tor onion services scenarios; please just indicate which scenarios you covered in the comment on the release ticket and the row at the end of the QA matrix (please fill in the QA matrix as you begin QA, so that work is not duplicated).

From a 1.0.0 install:

#### Tor onion services: upgrade to v2
- [x] Do not rerun `./securedrop-admin sdconfig`. Using the same `site-specific` as from before your upgrade to 1.0.0, run `./securedrop-admin install`. V2 should still be enabled, and v3 should not be enabled.

#### Tor onion services: upgrade to v2+v3

Precondition:

* Save the site-specific from v2 only. This will be used in a test towards the end of this section.
* Perform a backup on v2.

- [x] rerun `./securedrop-admin sdconfig` to enable v2 and v3 onion services, and then do `./securedrop-admin install`. Then run `securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have a working v3 onion address.
- [x] Now disable SSH over Tor, rerun `securedrop-admin install` and `./securedrop-admin tailsconfig`:
  - [ ] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts **FAIL, install stalls on "Force reboot" task. On second run, install fails and admin is locked out from SSH.**
  - [ ] Verify that `ssh app` and `ssh mon` work as expected. **FAIL, not updated because of stall above**
- [x] Use `make self-signed-https-certs` to generate self-signed certificates for testing, and run `./securedrop-admin sdconfig` enabling HTTPS:
  - [x] Verify that a warning is shown to the user indicating that they should update their certificate prior to sharing their v3 onion URL with users.
- [x] Test multi-admin behavior. Conduct this test step after v3 is enabled on the server:
  - [x] Back up `site-specific`, and copy the version from before the upgrade into place instead. Re-run `./securedrop-admin install`, and verify that it fails with a user-friendly message, due to `v3_onion_services=False`.
  - [x] Restore the v3 `site-specific` and move the `tor_v3_keys.json` file out of `install_files/ansible-base`. Re-run `./securedrop-admin install` and verify that it fails with a message due to the missing keys file.
- [ ] Restore the backup from v2. The v3 onions should not be disabled. **FAIL - restore stalls after tor restart, because v3 onions are disabled by the /etc/tor/torrc from the backup**
- [x] Now run the backup and restore again to a new v2+v3 install. The v3 onions should be enabled.

#### Tor onion service: v3 only, no HTTPS

- [x] rerun `./securedrop-admin sdconfig` to enable only v3 onion services, and then do `./securedrop-admin install`. Now run `./securedrop-admin tailsconfig` and check if the source and journalist desktop shortcuts have a working v3 onion address. (#4708, #4677)
- [ ] Now disable SSH over Tor, and re-run `./securedrop-admin install`:
  - [ ] Verify that `~/.ssh/config` contains IP addresses rather than Onion service addresses for the `app` and `mon` hosts **FAIL - see #4779**
  - [ ] Verify that `ssh app` and `ssh mon` work as expected. **FAIL, see #4779**
- [x] Run backup and restore to a new install. The v3 onions should be enabled.

#### Tor onion service: adding v3 interfaces with SSH over LAN

- [x] From a v2-only instance using SSH over LAN, upgrade to v3 only. You should continue to be able to SSH over LAN, and the v2 and v3 source and journalist interfaces should be available.

#### Deletion functionality

- [ ] Upgrade test: Prior to upgrading to 1.0.0, run the QA loader: https://docs.securedrop.org/en/release-0.14.0/development/database_migrations.html#release-testing-migrations. Then, after upgrading, ensure that there are no submissions with either NULL sources or sources that no longer exist in the database. The corresponding files on disk should also be gone.
  **n/a**

#### Testing detection and correction of disconnected submissions

Visit the source interface and send two messages.

First we'll test a disconnected database record. In your `www-data` shell:

```
cd /var/lib/securedrop/store
ls -laR
```

You should see the two message files. Remove one with `rm`.

```
cd /var/www/securedrop
```

- [x] `./manage.py check-disconnected-db-submissions` should report `There are submissions in the database with no corresponding files. Run "manage.py list-disconnected-db-submissions" for details.`
- [x] `./manage.py list-disconnected-db-submissions` should list the ID of the deleted submission, e.g. 2.
- [x] `./manage.py delete-disconnected-db-submissions` should prompt you with `Enter 'y' to delete all submissions missing files:` -- reply y and you should see `Removing submission IDs [2]...` (the ID may differ).

Now we'll delete the remaining database record and verify that its disconnected file is detected. Still in your `www-data` shell:

```
sqlite3 /var/lib/securedrop/db.sqlite
```

Delete the submission record for the remaining message (substitute your filename):

```
delete from submissions where filename = '1-exhausted_overmantel-msg.gpg';
```

- [x] `./manage.py check-disconnected-fs-submissions` should report `There are files in the submission area with no corresponding records in the database. Run "manage.py list-disconnected-fs-submissions" for details..`
- [x] `./manage.py list-disconnected-fs-submissions` should show a list like: `/var/lib/securedrop/store/B3A5GPU4OHPQK736R76HKJUP5VONIOMKZLXK77GPTGNW7EJ63AY5YBX27P3DB2X4DZBXPX3LGBBXAJZYG3HQRHE4B6UE5YYBPGDYZOA=/1-exhausted_overmantel-msg.gpg`
- [x] `./manage.py delete-disconnected-fs-submissions` should prompt you to delete that file. Do so and it should be deleted.

#### Testing automatic requeuing of interrupted deletions

Establish two SSH connections to the app server. In one, become root with `sudo su -` and in the other become www-data with `sudo -u www-data bash`.

In the `www-data` shell:

Activate the securedrop-app-code virtualenv: `. /opt/venvs/securedrop-app-code/bin/activate`

`cd /var/www/securedrop`

Create a big file that will take a while to delete: `dd if=/dev/zero of=/var/lib/securedrop/store/bigfile bs=1M count=1000`

Submit a job to delete it:

```
python3
>>> import rm
>>> import worker
>>> q = worker.create_queue()
>>> q.enqueue(rm.secure_delete, "/var/lib/securedrop/store/bigfile")
```

Exit Python.

In the root shell:

Reboot, then reconnect. Look at the rqrequeue log: `less /var/log/securedrop_worker/rqrequeue.err` -- at the end you should see lines like this:

```
2019-08-08 17:31:01,118 INFO Running every 60 seconds.
2019-08-08 17:31:01,141 INFO Requeuing job <Job 1082e71f-7581-448c-b84b-027e55b4ef8e: rm.secure_delete('/var/lib/securedrop/store/bigfile')>
2019-08-08 17:32:01,192 INFO Skipping job 1082e71f-7581-448c-b84b-027e55b4ef8e, which is already being run by worker rq:worker:6a6b548310f948e291fa954743b8094f
```

That indicates the interrupted job was found and restarted, but was left alone at the next check because it was already running. The job should run to completion, `/var/lib/securedrop/store/bigfile` should be deleted, and the rqrequeue log should start saying:

```
2019-08-08 17:33:01,253 INFO No interrupted jobs found in started job registry.
```

- [x] Verified the requeue behavior

#### Testing OSSEC reporting of disconnects

Create a file under `/var/lib/securedrop/store` with `touch /var/lib/securedrop/store/testfile`.
If you don't feel like waiting a day for the OSSEC report, you can edit `/var/ossec/etc/ossec.conf`, look for `check-disconnect`, and reduce the `<frequency>`, then `service ossec restart`.

- [x] An OSSEC alert was sent indicating a disconnect had occurred.
- [x] I otherwise got no manage.py notifications about this functionality.

#### Miscellaneous other changes

- [ ] Python 2 should not be used anywhere on the system. Inspect the version of python that is used by running `ps -aux | grep python` and verify that `/opt/venvs/securedrop-app-code/bin/python` is used instead of `/usr/bin/python`. **FAIL - supervisord run by python 2.7**
- [x] Journalist notifications continue to work as expected on 1.0.0.
- [x] Check that both app and mon servers are running Tor version `0.4.x` (#4658).
- [ ] Log in as a journalist, then reset that journalist's password via another admin account; this should invalidate the journalist's session, and the journalist must log in again (#4679) **Not tested; no separate Admin WS available**
- [x] There are no linux-image generic packages installed (non-grsec kernels) (`apt list --installed | grep linux-image`) (#4641)
- [x] Shortly after uploading a file via the Source Interface, a checksum is added to the `submissions` table (managed via `rq`)

### Preflight

- [ ] Ensure the builder image is up-to-date on release day

These tests should be performed on the day of release, prior to pushing live Debian packages to apt.freedom.press:

#### Basic testing

- [ ] Install or upgrade occurs without error
- [ ] Source interface is available and version string indicates it is 1.0.0
- [ ] A message can be successfully submitted

#### Tails

- [ ] The updater GUI appears on boot
- [ ] The update successfully occurs to 1.0.0
- [ ] After reboot, updater GUI no longer appears
2019-09-17T17:58:13Z
[]
[]
freedomofpress/securedrop
4,855
freedomofpress__securedrop-4855
[ "4783" ]
ed014bed5f810b71f86c4bd556a6c73679587f53
diff --git a/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py --- a/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py +++ b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py @@ -10,7 +10,6 @@ import os from alembic import op import sqlalchemy as sa -from rm import secure_delete # raise the errors if we're not in production raise_errors = os.environ.get("SECUREDROP_ENV", "prod") != "prod" @@ -19,7 +18,6 @@ from journalist_app import create_app from sdconfig import config from store import NoFileFoundException, TooManyFilesException - from worker import create_queue except ImportError: # This is a fresh install, and config.py has not been created yet. if raise_errors: @@ -63,8 +61,8 @@ def upgrade(): """).bindparams(id=submission.id) ) - file_path = app.storage.path_without_filesystem_id(submission.filename) - create_queue().enqueue(secure_delete, file_path) + path = app.storage.path_without_filesystem_id(submission.filename) + app.storage.move_to_shredder(path) except NoFileFoundException: # The file must have been deleted by the admin, remove the row conn.execute( @@ -85,8 +83,8 @@ def upgrade(): """).bindparams(id=reply.id) ) - file_path = app.storage.path_without_filesystem_id(reply.filename) - create_queue().enqueue(secure_delete, file_path) + path = app.storage.path_without_filesystem_id(reply.filename) + app.storage.move_to_shredder(path) except NoFileFoundException: # The file must have been deleted by the admin, remove the row conn.execute( diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -211,16 +211,13 @@ def download_reply(source_uuid, reply_uuid): @token_required def single_submission(source_uuid, submission_uuid): if request.method == 'GET': - source = get_or_404(Source, source_uuid, column=Source.uuid) - submission = get_or_404(Submission, submission_uuid, - column=Submission.uuid) + get_or_404(Source, source_uuid, column=Source.uuid) + submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) return jsonify(submission.to_json()), 200 elif request.method == 'DELETE': - submission = get_or_404(Submission, submission_uuid, - column=Submission.uuid) - source = get_or_404(Source, source_uuid, column=Source.uuid) - utils.delete_file(source.filesystem_id, submission.filename, - submission) + get_or_404(Source, source_uuid, column=Source.uuid) + submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) + utils.delete_file_object(submission) return jsonify({'message': 'Submission deleted'}), 200 @api.route('/sources/<source_uuid>/replies', methods=['GET', 'POST']) @@ -290,13 +287,12 @@ def all_source_replies(source_uuid): methods=['GET', 'DELETE']) @token_required def single_reply(source_uuid, reply_uuid): - source = get_or_404(Source, source_uuid, column=Source.uuid) + get_or_404(Source, source_uuid, column=Source.uuid) reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) if request.method == 'GET': return jsonify(reply.to_json()), 200 elif request.method == 'DELETE': - utils.delete_file(source.filesystem_id, reply.filename, - reply) + utils.delete_file_object(reply) return jsonify({'message': 'Reply deleted'}), 200 @api.route('/submissions', methods=['GET']) diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ 
b/securedrop/journalist_app/utils.py @@ -13,9 +13,7 @@ from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, WrongPasswordException, FirstOrLastNameError, LoginThrottledException, BadTokenException, SourceStar, PasswordError, Submission, RevokedToken) -from rm import secure_delete from store import add_checksum_for_file -from worker import create_queue import typing # https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking @@ -171,16 +169,16 @@ def download(zip_basename, submissions): as_attachment=True) -def delete_file(filesystem_id, filename, file_object): - file_path = current_app.storage.path(filesystem_id, filename) - create_queue().enqueue(secure_delete, file_path) +def delete_file_object(file_object): + path = current_app.storage.path(file_object.source.filesystem_id, file_object.filename) + current_app.storage.move_to_shredder(path) db.session.delete(file_object) db.session.commit() def bulk_delete(filesystem_id, items_selected): for item in items_selected: - delete_file(filesystem_id, item.filename, item) + delete_file_object(item) flash(ngettext("Submission deleted.", "{num} submissions deleted.".format( @@ -260,7 +258,8 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions - job = create_queue().enqueue(secure_delete, current_app.storage.path(filesystem_id)) + path = current_app.storage.path(filesystem_id) + current_app.storage.move_to_shredder(path) # Delete the source's reply keypair current_app.crypto_util.delete_reply_keypair(filesystem_id) @@ -269,7 +268,6 @@ def delete_collection(filesystem_id): source = get_source(filesystem_id) db.session.delete(source) db.session.commit() - return job def set_name(user, first_name, last_name): diff --git a/securedrop/rm.py b/securedrop/rm.py --- a/securedrop/rm.py +++ b/securedrop/rm.py @@ -52,7 +52,7 @@ def shred(path, delete=True): def secure_delete(path): - # type: (str) -> str + # type: (str) -> None """ Securely deletes the file at ``path``. @@ -86,9 +86,6 @@ def secure_delete(path): for d in reversed(sorted(directories_to_remove)): os.rmdir(d) - # We need to return a non-`None` value so the rq worker writes this back to Redis - return "success" - def check_secure_delete_capability(): # type: () -> bool diff --git a/securedrop/store.py b/securedrop/store.py --- a/securedrop/store.py +++ b/securedrop/store.py @@ -14,6 +14,7 @@ from secure_tempfile import SecureTemporaryFile +import rm from worker import create_queue @@ -32,8 +33,8 @@ VALIDATE_FILENAME = re.compile( - r"^(?P<index>\d+)\-[a-z0-9-_]*" - r"(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match + r"^(?P<index>\d+)\-[a-z0-9-_]*(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$" +).match class PathException(Exception): @@ -83,45 +84,66 @@ def __init__(self, storage_path, temp_dir, gpg_key): self.__gpg_key = gpg_key - def verify(self, p): - # type: (str) -> bool - """Assert that the path is absolute, normalized, inside - `self.__storage_path`, and matches the filename format. 
+ # where files and directories are sent to be securely deleted + self.__shredder_path = os.path.abspath(os.path.join(self.__storage_path, "../shredder")) + if not os.path.exists(self.__shredder_path): + os.makedirs(self.__shredder_path, mode=0o700) + + @property + def storage_path(self): + return self.__storage_path + + @property + def shredder_path(self): + return self.__shredder_path + + def shredder_contains(self, path: str) -> bool: + """ + Returns True if the fully-resolved path lies within the shredder. + """ + common_path = os.path.commonpath((os.path.realpath(path), self.__shredder_path)) + return common_path == self.__shredder_path + + def store_contains(self, path: str) -> bool: """ + Returns True if the fully-resolved path lies within the store. + """ + common_path = os.path.commonpath((os.path.realpath(path), self.__storage_path)) + return common_path == self.__storage_path + + def verify(self, p: str) -> bool: + """ + Verify that a given path is valid for the store. + """ + + if self.store_contains(p): + # verifying a hypothetical path + if not os.path.exists(p): + return True - # os.path.abspath makes the path absolute and normalizes - # '/foo/../bar' to '/bar', etc. We have to check that the path is - # normalized before checking that it starts with the - # `self.__storage_path` or else a malicious actor could append a - # bunch of '../../..' to access files outside of the store. - if not p == os.path.abspath(p): - raise PathException("The path is not absolute and/or normalized") - - # Check that the path p is in self.__storage_path - if os.path.relpath(p, self.__storage_path).startswith('..'): - raise PathException("Invalid directory %s" % (p, )) - - if os.path.isfile(p): - filename = os.path.basename(p) - ext = os.path.splitext(filename)[-1] - if filename == '_FLAG': + # extant paths must be directories or correctly-named plain files + if os.path.isdir(p): return True - if ext != '.gpg': - # if there's an extension, verify it's a GPG - raise PathException("Invalid file extension %s" % (ext, )) - if not VALIDATE_FILENAME(filename): - raise PathException("Invalid filename %s" % (filename, )) - return False + if os.path.isfile(p) and VALIDATE_FILENAME(os.path.basename(p)): + return True - def path(self, *s): - # type: (*str) -> str - """Get the normalized, absolute file path, within - `self.__storage_path`. + raise PathException("Path not valid in store: {}".format(p)) + + def path(self, filesystem_id: str, filename: str = '') -> str: + """ + Returns the path resolved within `self.__storage_path`. + + Raises PathException if `verify` doesn't like the path. 
""" - joined = os.path.join(os.path.abspath(self.__storage_path), *s) - absolute = os.path.abspath(joined) - self.verify(absolute) + joined = os.path.join(os.path.realpath(self.__storage_path), filesystem_id, filename) + absolute = os.path.realpath(joined) + if not self.verify(absolute): + raise PathException( + """Could not resolve ("{}", "{}") to a path within the store.""".format( + filesystem_id, filename + ) + ) return absolute def path_without_filesystem_id(self, filename): @@ -132,7 +154,7 @@ def path_without_filesystem_id(self, filename): """ joined_paths = [] - for rootdir, _, files in os.walk(os.path.abspath(self.__storage_path)): + for rootdir, _, files in os.walk(os.path.realpath(self.__storage_path)): for file_ in files: if file_ in filename: joined_paths.append(os.path.join(rootdir, file_)) @@ -144,7 +166,10 @@ def path_without_filesystem_id(self, filename): else: absolute = joined_paths[0] - self.verify(absolute) + if not self.verify(absolute): + raise PathException( + """Could not resolve "{}" to a path within the store.""".format(filename) + ) return absolute def get_bulk_archive(self, selected_submissions, zip_directory=''): @@ -164,9 +189,7 @@ def get_bulk_archive(self, selected_submissions, zip_directory=''): submissions = [s for s in selected_submissions if s.source.journalist_designation == source] for submission in submissions: - filename = self.path(submission.source.filesystem_id, - submission.filename) - self.verify(filename) + filename = self.path(submission.source.filesystem_id, submission.filename) document_number = submission.filename.split('-')[0] if zip_directory == submission.source.journalist_filename: fname = zip_directory @@ -180,6 +203,79 @@ def get_bulk_archive(self, selected_submissions, zip_directory=''): )) return zip_file + def move_to_shredder(self, path: str): + """ + Moves content from the store to the shredder for secure deletion. + + Python's os.renames (and the underlying rename(2) calls) will + silently overwrite content, which could bypass secure + deletion, so we create a temporary directory under the + shredder directory and move the specified content there. + + This function is intended to be atomic and quick, for use in + deletions via the UI and API. The actual secure deletion is + performed by an asynchronous process that monitors the + shredder directory. + """ + if not self.verify(path): + raise ValueError( + """Path is not within the store: "{}" """.format(path) + ) + + if not os.path.exists(path): + raise ValueError( + """Path does not exist: "{}" """.format(path) + ) + + relpath = os.path.relpath(path, start=self.storage_path) + dest = os.path.join(tempfile.mkdtemp(dir=self.__shredder_path), relpath) + current_app.logger.info("Moving {} to shredder: {}".format(path, dest)) + os.renames(path, dest) + + def clear_shredder(self): + current_app.logger.info("Clearing shredder") + directories = [] + targets = [] + for directory, subdirs, files in os.walk(self.shredder_path): + for subdir in subdirs: + real_subdir = os.path.realpath(os.path.join(directory, subdir)) + if self.shredder_contains(real_subdir): + directories.append(real_subdir) + for f in files: + abs_file = os.path.abspath(os.path.join(directory, f)) + if os.path.islink(abs_file): + # Somehow, a symbolic link was created in the + # store. This shouldn't happen in normal + # operations. Just remove the link; don't try to + # shred its target. Note that we only have special + # handling for symlinks. 
Hard links -- which + # again, shouldn't occur in the store -- will + # result in the file data being shredded once for + # each link. + current_app.logger.info( + "Deleting link {} to {}".format( + abs_file, os.readlink(abs_file) + ) + ) + os.unlink(abs_file) + continue + if self.shredder_contains(abs_file): + targets.append(abs_file) + + target_count = len(targets) + current_app.logger.info("Files to delete: {}".format(target_count)) + for i, t in enumerate(targets, 1): + current_app.logger.info("Securely deleting file {}/{}: {}".format(i, target_count, t)) + rm.secure_delete(t) + current_app.logger.info("Securely deleted file {}/{}: {}".format(i, target_count, t)) + + directories_to_remove = set(directories) + dir_count = len(directories_to_remove) + for i, d in enumerate(reversed(sorted(directories_to_remove)), 1): + current_app.logger.debug("Removing directory {}/{}: {}".format(i, dir_count, d)) + os.rmdir(d) + current_app.logger.debug("Removed directory {}/{}: {}".format(i, dir_count, d)) + def save_file_submission(self, filesystem_id, count, journalist_filename, filename, stream): # type: (str, int, str, str, BufferedIOBase) -> str
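To make the new flow concrete, here is a minimal usage sketch of the two-phase deletion this patch introduces: `move_to_shredder` for the fast, atomic part and `clear_shredder` for the slow secure wipe. The `Storage` method names come from the diff above; the two wrapper functions are illustrative only, not code from the patch.

```python
from flask import current_app


def request_deletion(filesystem_id, filename):
    # Phase 1 (synchronous, cheap): rename the content into a fresh
    # temporary directory under <store>/../shredder. Built on os.renames,
    # so nothing is overwritten and the UI/API call returns quickly.
    path = current_app.storage.path(filesystem_id, filename)
    current_app.storage.move_to_shredder(path)


def shred_everything_pending():
    # Phase 2 (asynchronous, slow): walk the shredder directory, securely
    # delete regular files, unlink stray symlinks, and remove the
    # emptied directories.
    current_app.storage.clear_shredder()
```

Separating the rename from the shred means a crash or reboot between the two phases loses nothing: the content simply sits in the shredder directory until the next sweep.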
diff --git a/molecule/testinfra/staging/app-code/test_redis_worker.py b/molecule/testinfra/staging/app-code/test_redis_worker.py deleted file mode 100644 --- a/molecule/testinfra/staging/app-code/test_redis_worker.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest -import re - - -testinfra_hosts = ["app-staging"] -securedrop_test_vars = pytest.securedrop_test_vars - - [email protected]( - "config_line", - [ - "[program:securedrop_worker]", - "command=/opt/venvs/securedrop-app-code/bin/rqworker", - "directory={}".format(securedrop_test_vars.securedrop_code), - ( - 'environment=PYTHONPATH="/var/www/securedrop:' - '/opt/venvs/securedrop-app-code/lib/python3.5/site-packages"' - ), - "autostart=true", - "autorestart=true", - "startretries=3", - "stderr_logfile=/var/log/securedrop_worker/rqworker.err", - "stdout_logfile=/var/log/securedrop_worker/rqworker.out", - "user={}".format(securedrop_test_vars.securedrop_user), - ], -) -def test_redis_worker_configuration(host, config_line): - """ - Ensure SecureDrop Redis worker config for supervisor service - management is configured correctly. - """ - f = host.file("/etc/supervisor/conf.d/securedrop_worker.conf") - # Config lines may have special characters such as [] which will - # throw off the regex matching, so let's escape those chars. - regex = re.escape(config_line) - assert f.contains("^{}$".format(regex)) - - -def test_redis_worker_config_file(host): - """ - Ensure SecureDrop Redis worker config for supervisor service - management has proper ownership and mode. - - Using separate test so that the parametrization doesn't rerun - the file mode checks, which would be useless. - """ - f = host.file("/etc/supervisor/conf.d/securedrop_worker.conf") - assert f.is_file - assert f.mode == 0o644 - assert f.user == "root" - assert f.group == "root" diff --git a/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py b/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py deleted file mode 100644 --- a/molecule/testinfra/staging/app-code/test_rqrequeue_conf.py +++ /dev/null @@ -1,52 +0,0 @@ -import pytest -import re - - -testinfra_hosts = ["app-staging"] -securedrop_test_vars = pytest.securedrop_test_vars - - [email protected]( - "config_line", - [ - "[program:securedrop_rqrequeue]", - ( - "command=/opt/venvs/securedrop-app-code/bin/python " - "/var/www/securedrop/scripts/rqrequeue --interval 60" - ), - "directory={}".format(securedrop_test_vars.securedrop_code), - ( - 'environment=PYTHONPATH="/var/www/securedrop:' - '/opt/venvs/securedrop-app-code/lib/python3.5/site-packages"' - ), - "autostart=true", - "autorestart=true", - "startretries=3", - "stderr_logfile=/var/log/securedrop_worker/rqrequeue.err", - "stdout_logfile=/var/log/securedrop_worker/rqrequeue.out", - "user={}".format(securedrop_test_vars.securedrop_user), - ], -) -def test_rqrequeue_configuration(host, config_line): - """ - Ensure Supervisor config for rqrequeue is correct. - """ - f = host.file("/etc/supervisor/conf.d/securedrop_rqrequeue.conf") - # Config lines may have special characters such as [] which will - # throw off the regex matching, so let's escape those chars. - regex = re.escape(config_line) - assert f.contains("^{}$".format(regex)) - - -def test_rqrequeue_config_file(host): - """ - Check ownership and mode of Supervisor config for rqrequeue. - - Using separate test so that the parametrization doesn't rerun - the file mode checks, which would be useless. 
- """ - f = host.file("/etc/supervisor/conf.d/securedrop_rqrequeue.conf") - assert f.is_file - assert f.mode == 0o644 - assert f.user == "root" - assert f.group == "root" diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -28,7 +28,6 @@ def test_apache_default_docroot_is_absent(host): 'securedrop-config', 'securedrop-keyring', 'sqlite3', - 'supervisor', ]) def test_securedrop_application_apt_dependencies(host, package): """ diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py b/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py @@ -0,0 +1,50 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_rqrequeue_service(host): + """ + Verify configuration of securedrop_rqrequeue systemd service. + """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_rqrequeue.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop rqrequeue process", + "After=redis-server.service", + "Wants=redis-server.service", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/rqrequeue --interval 60".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_rqrequeue") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py b/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py @@ -0,0 +1,49 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_rqworker_service(host): + """ + Verify configuration of securedrop_rqworker systemd service. 
+ """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_rqworker.service" + + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop rq worker", + "After=redis-server.service", + "Wants=redis-server.service", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/rqworker".format(securedrop_test_vars.securedrop_venv_bin), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_rqworker") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py b/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py @@ -0,0 +1,48 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_shredder_service(host): + """ + Verify configuration of securedrop_shredder systemd service. + """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_shredder.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop shredder", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/shredder --interval 60".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_shredder") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py --- a/molecule/testinfra/staging/app/test_appenv.py +++ b/molecule/testinfra/staging/app/test_appenv.py @@ -48,6 +48,11 @@ def test_app_code_pkg(host): assert host.package("securedrop-app-code").is_installed +def test_supervisor_not_installed(host): + """ ensure supervisor package is not installed """ + assert host.package("supervisor").is_installed is False + + def test_gpg_key_in_keyring(host): """ ensure test gpg key is present in app keyring """ with host.sudo(sdvars.securedrop_user): @@ -71,13 +76,3 @@ def test_securedrop_tmp_clean_cron(host): cronlist = host.run("crontab -l").stdout cronjob = 
"@daily {}/manage.py clean-tmp".format(sdvars.securedrop_code) assert cronjob in cronlist - - -def test_app_workerlog_dir(host): - """ ensure directory for worker logs is present """ - f = host.file('/var/log/securedrop_worker') - with host.sudo(): - assert f.is_directory - assert f.user == "root" - assert f.group == "root" - assert f.mode == 0o700 diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1689,19 +1689,15 @@ def test_delete_source_deletes_docs_on_disk(journalist_app, utils.db_helper.submit(source, 2) utils.db_helper.reply(journo, source, 2) - # Encrypted documents exists - dir_source_docs = os.path.join(config.STORE_DIR, - test_source['filesystem_id']) + dir_source_docs = os.path.join(config.STORE_DIR, test_source['filesystem_id']) assert os.path.exists(dir_source_docs) - job = journalist_app_module.utils.delete_collection( - test_source['filesystem_id']) + journalist_app_module.utils.delete_collection(test_source['filesystem_id']) - # Wait up to 5s to wait for Redis worker secure deletion to complete - utils.asynchronous.wait_for_redis_worker(job) + def assertion(): + assert not os.path.exists(dir_source_docs) - # Encrypted documents no longer exist - assert not os.path.exists(dir_source_docs) + utils.asynchronous.wait_for_assertion(assertion) def test_login_with_invalid_password_doesnt_call_argon2(mocker, test_journo): diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py --- a/securedrop/tests/test_store.py +++ b/securedrop/tests/test_store.py @@ -1,9 +1,10 @@ # -*- coding: utf-8 -*- +import logging import os import io import pytest import re -import store +import stat import zipfile os.environ['SECUREDROP_ENV'] = 'test' # noqa @@ -12,6 +13,7 @@ from db import db from journalist_app import create_app from models import Submission, Reply +import store from store import Storage, queued_add_checksum_for_file, async_add_checksum_for_file @@ -102,16 +104,29 @@ def test_verify_path_not_absolute(journalist_app, config): def test_verify_in_store_dir(journalist_app, config): with pytest.raises(store.PathException) as e: - journalist_app.storage.verify(config.STORE_DIR + "_backup") - - assert 'Invalid directory' in str(e) + path = config.STORE_DIR + "_backup" + journalist_app.storage.verify(path) + assert e.message == "Path not valid in store: {}".format(path) def test_verify_store_path_not_absolute(journalist_app): with pytest.raises(store.PathException) as e: journalist_app.storage.verify('..') + assert e.message == "Path not valid in store: .." + - assert 'The path is not absolute and/or normalized' in str(e) +def test_verify_rejects_symlinks(journalist_app): + """ + Test that verify rejects paths involving links outside the store. + """ + try: + link = os.path.join(journalist_app.storage.storage_path, "foo") + os.symlink("/foo", link) + with pytest.raises(store.PathException) as e: + journalist_app.storage.verify(link) + assert e.message == "Path not valid in store: {}".format(link) + finally: + os.unlink(link) def test_verify_store_dir_not_absolute(): @@ -130,9 +145,15 @@ def test_verify_store_temp_dir_not_absolute(): assert re.compile('temp_dir.*is not absolute').match(msg) -def test_verify_flagged_file_in_sourcedir_returns_true(journalist_app, config): +def test_verify_regular_submission_in_sourcedir_returns_true(journalist_app, config): + """ + Tests that verify is happy with a regular submission file. 
+ + Verify should return True for a regular file that matches the + naming scheme of submissions. + """ source_directory, file_path = create_file_in_source_dir( - config, 'example-filesystem-id', '_FLAG' + config, 'example-filesystem-id', '1-regular-doc.gz.gpg' ) assert journalist_app.storage.verify(file_path) @@ -148,7 +169,7 @@ def test_verify_invalid_file_extension_in_sourcedir_raises_exception( with pytest.raises(store.PathException) as e: journalist_app.storage.verify(file_path) - assert 'Invalid file extension .txt' in str(e) + assert 'Path not valid in store: {}'.format(file_path) in str(e) def test_verify_invalid_filename_in_sourcedir_raises_exception( @@ -160,8 +181,7 @@ def test_verify_invalid_filename_in_sourcedir_raises_exception( with pytest.raises(store.PathException) as e: journalist_app.storage.verify(file_path) - - assert 'Invalid filename NOTVALID.gpg' in str(e) + assert e.message == 'Path not valid in store: {}'.format(file_path) def test_get_zip(journalist_app, test_source, config): @@ -292,3 +312,74 @@ def test_async_add_checksum_for_file(config, db_model): # requery to get a new object db_obj = db_model.query.filter_by(id=db_obj_id).one() assert db_obj.checksum == 'sha256:' + expected_hash + + +def test_path_configuration_is_immutable(journalist_app): + """ + Check that the store's paths cannot be changed. + + They're exposed via properties that are supposed to be + read-only. It is of course possible to change them via the mangled + attribute names, but we want to confirm that accidental changes + are prevented. + """ + with pytest.raises(AttributeError): + journalist_app.storage.storage_path = "/foo" + + original_storage_path = journalist_app.storage.storage_path[:] + journalist_app.storage.__storage_path = "/foo" + assert journalist_app.storage.storage_path == original_storage_path + + with pytest.raises(AttributeError): + journalist_app.storage.shredder_path = "/foo" + + original_shredder_path = journalist_app.storage.shredder_path[:] + journalist_app.storage.__shredder_path = "/foo" + assert journalist_app.storage.shredder_path == original_shredder_path + + +def test_shredder_configuration(journalist_app): + """ + Ensure we're creating the shredder directory correctly. + + We want to ensure that it's a sibling of the store directory, with + mode 0700. + """ + store_path = journalist_app.storage.storage_path + shredder_path = journalist_app.storage.shredder_path + assert os.path.dirname(shredder_path) == os.path.dirname(store_path) + s = os.stat(shredder_path) + assert stat.S_ISDIR(s.st_mode) is True + assert stat.S_IMODE(s.st_mode) == 0o700 + + +def test_shredder_deletes_symlinks(journalist_app, caplog): + """ + Confirm that `store.clear_shredder` removes any symlinks in the shredder. + """ + caplog.set_level(logging.DEBUG) + + link_target = "/foo" + link = os.path.abspath(os.path.join(journalist_app.storage.shredder_path, "foo")) + os.symlink(link_target, link) + journalist_app.storage.clear_shredder() + assert "Deleting link {} to {}".format(link, link_target) in caplog.text + assert not os.path.exists(link) + + +def test_shredder_shreds(journalist_app, caplog): + """ + Confirm that `store.clear_shredder` removes files. 
+ """ + caplog.set_level(logging.DEBUG) + + testdir = os.path.abspath(os.path.join(journalist_app.storage.shredder_path, "testdir")) + os.makedirs(testdir) + testfile = os.path.join(testdir, "testfile") + with open(testfile, "w") as f: + f.write("testdata\n") + + journalist_app.storage.clear_shredder() + assert "Securely deleted file 1/1: {}".format(testfile) in caplog.text + assert not os.path.isfile(testfile) + assert not os.path.isdir(testdir)
supervisor is using python 2

## Description

Python 2 is used by supervisor on 1.0.0-rc2 installs. You can see this by running `ps aux | grep supervisor` or `ps aux | grep python`. (I discovered this during exploratory testing where I moved the python 2 interpreter aside with `mv /usr/bin/python /usr/bin/pythontemp` before running the QA checklist, to see if any functionality broke due to a dependency on Python 2.)

## Steps to Reproduce

Install 1.0.0-rc2.

## Expected Behavior

Python 2 is not required for any SecureDrop functionality.

## Actual Behavior

Python 2 is used by supervisor.

## Comments

Here's my initial investigation of this:

* there is a supervisor release on PyPI that supports Python 3: https://github.com/Supervisor/supervisor/issues/1060#issuecomment-480438643
* but we install from apt (supervisor is a dependency of the securedrop-app-code deb), and the supervisor package in xenial (https://packages.ubuntu.com/xenial/admin/supervisor) is an old version that is python 2 only
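For anyone scripting the audit, here is a rough programmatic version of the `ps aux | grep python` check described above. This helper is hypothetical, not part of SecureDrop, and it needs root to resolve other users' `/proc/<pid>/exe` links:

```python
import os


def python_interpreters_in_use():
    """Map PID -> interpreter path for every process running some python."""
    found = {}
    for pid in filter(str.isdigit, os.listdir("/proc")):
        try:
            exe = os.readlink("/proc/{}/exe".format(pid))
        except OSError:
            continue  # process exited, kernel thread, or insufficient privileges
        if "python" in os.path.basename(exe):
            found[int(pid)] = exe
    return found


# On a healthy install everything should resolve into the app virtualenv
# (e.g. /opt/venvs/securedrop-app-code/bin/python3); /usr/bin/python is
# Python 2.7 on Xenial, so a hit there (such as supervisord) is this bug.
```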
OK, so after discussing this async, @kushaldas is going to discuss it with a colleague tomorrow. If we don't get clarity after that (to understand what the policy is for Xenial LTS packages that currently require python 2), we'll file upstream.

In my view, for any dependency used in production SecureDrop instances that currently _requires_ Python 2, we need to do one of the following:

* Verify that upstream will be updating to a version requiring Python 3 instead of Python 2, well in advance of the Python 2 EOL
* Package it ourselves, or see if we can install it using e.g. pip instead of apt
* See if we can remove the requirement for the tool

Per chat w/ @redshiftzero, removing from the 1.0.0 milestone (we won't resolve this by next week and it's not a release blocker), but the above follow-up should be done at a high priority.

The `supervisor` package is in the `universe` section, so I'd guess that Ubuntu won't be replacing it with a Python 3 version. It would not be difficult to install it in our virtualenv. We could start it with a `@reboot` cron job under `www-data`, so losing systemd oversight shouldn't be a problem: worst case, it would be restarted with the daily reboot. I suppose we could just run the rq worker and requeuer programs the same way; it's nice to have supervisor controlling them, but it should be possible to remove it altogether.
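For illustration, running the rq worker directly, with no supervisor in between, is a one-liner with rq's Python API. This is a hedged sketch of the idea floated above (the testinfra assertions earlier in this document show the fix ultimately shipping as systemd units that invoke the `rqworker` console script directly); process supervision, restarts, and log rotation would then have to come from cron or systemd instead:

```python
from redis import Redis
from rq import Connection, Queue, Worker

# Consume jobs from the default queue in the foreground; equivalent in
# spirit to invoking the `rqworker` entry point from the virtualenv.
with Connection(Redis()):
    Worker([Queue()]).work()
```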
2019-09-20T18:43:04Z
[]
[]
freedomofpress/securedrop
4,859
freedomofpress__securedrop-4859
[ "4699" ]
ea1e18eb09b59b4de7b786aabeac76b257907c6c
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -4,7 +4,6 @@ import pretty_bad_protocol as gnupg import os import io -import six import scrypt from random import SystemRandom @@ -278,9 +277,7 @@ def decrypt(self, secret, ciphertext): salt=self.scrypt_gpg_pepper) data = self.gpg.decrypt(ciphertext, passphrase=hashed_codename).data - if not six.PY2: # Python3 - return data.decode('utf-8') - return data + return data.decode('utf-8') def clean(s, also=''): diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -15,7 +15,6 @@ sys.path.insert(0, "/var/www/securedrop") # noqa: E402 import qrcode -from six.moves import input from flask import current_app from sqlalchemy.orm.exc import NoResultFound diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py --- a/securedrop/management/submissions.py +++ b/securedrop/management/submissions.py @@ -5,8 +5,6 @@ import sys import time -from six.moves import input - from db import db from rm import secure_delete from models import Reply, Source, Submission diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py --- a/securedrop/secure_tempfile.py +++ b/securedrop/secure_tempfile.py @@ -2,7 +2,6 @@ import base64 import os import io -import six from tempfile import _TemporaryFileWrapper # type: ignore from pretty_bad_protocol._util import _STREAMLIKE_TYPES @@ -50,10 +49,7 @@ def __init__(self, store_dir): self.create_key() data = base64.urlsafe_b64encode(os.urandom(32)) - if not six.PY2: # For Python3 - self.tmp_file_id = data.decode('utf-8').strip('=') - else: - self.tmp_file_id = data.strip('=') + self.tmp_file_id = data.decode('utf-8').strip('=') self.filepath = os.path.join(store_dir, '{}.aes'.format(self.tmp_file_id)) @@ -90,8 +86,7 @@ def write(self, data): raise AssertionError('You cannot write after reading!') self.last_action = 'write' - # This is the old Python related code - if isinstance(data, six.text_type): + if isinstance(data, str): data = data.encode('utf-8') self.file.write(self.encryptor.update(data)) diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py --- a/securedrop/source_app/info.py +++ b/securedrop/source_app/info.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- -import six from flask import Blueprint, render_template, send_file, current_app -if six.PY2: - from cStringIO import StringIO # noqa -else: - from io import BytesIO # noqa +from io import BytesIO # noqa def make_blueprint(config): @@ -23,10 +19,7 @@ def recommend_tor_browser(): def download_journalist_pubkey(): journalist_pubkey = current_app.crypto_util.gpg.export_keys( config.JOURNALIST_KEY) - if six.PY2: - data = StringIO(journalist_pubkey) - else: - data = BytesIO(journalist_pubkey.encode('utf-8')) + data = BytesIO(journalist_pubkey.encode('utf-8')) return send_file(data, mimetype="application/pgp-keys", attachment_filename=config.JOURNALIST_KEY + ".asc", diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -1,7 +1,6 @@ import operator import os import io -import six from datetime import datetime from flask import (Blueprint, render_template, flash, redirect, url_for, g, @@ -97,10 +96,7 @@ def lookup(): with io.open(reply_path, "rb") as f: contents = f.read() reply_obj = current_app.crypto_util.decrypt(g.codename, contents) - if six.PY2: # Python2 - reply.decrypted 
= reply_obj.decode('utf-8') - else: - reply.decrypted = reply_obj + reply.decrypted = reply_obj except UnicodeDecodeError: current_app.logger.error("Could not decode reply %s" % reply.filename)
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- +import configparser import pretty_bad_protocol as gnupg import logging from hypothesis import settings @@ -12,8 +13,6 @@ import signal import subprocess -from six.moves import configparser - from flask import url_for from pyotp import TOTP diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -1,6 +1,5 @@ from . import source_navigation_steps, journalist_navigation_steps from . import functional_test -import six class TestSourceInterface( @@ -28,6 +27,5 @@ def test_journalist_key_from_source_interface(self): data = self.return_downloaded_content(self.source_location + "/journalist-key", None) - if six.PY3: - data = data.decode('utf-8') + data = data.decode('utf-8') assert "BEGIN PGP PUBLIC KEY BLOCK" in data diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -6,7 +6,6 @@ import os import pytest import re -import six os.environ['SECUREDROP_ENV'] = 'test' # noqa import crypto_util @@ -343,10 +342,4 @@ def test_encrypt_then_decrypt_gives_same_result( ciphertext = crypto.encrypt(message, str(key)) decrypted_text = crypto.decrypt(secret, ciphertext) - # `hypothesis.strategies.text()` generates `unicode` char sequences; use - # decode('utf-8') in order to decode decrypted ciphertext as `unicode` for - # correct type comparisons. Only decode on Python 2. - if six.PY2: - decrypted_text = decrypted_text.decode('utf-8') - assert decrypted_text == message diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py --- a/securedrop/tests/test_i18n_tool.py +++ b/securedrop/tests/test_i18n_tool.py @@ -2,7 +2,6 @@ import io import os -import six from os.path import abspath, dirname, exists, getmtime, join, realpath os.environ['SECUREDROP_ENV'] = 'test' # noqa import i18n_tool @@ -254,7 +253,7 @@ def test_update_from_weblate(self, tmpdir, caplog): k = {'_cwd': join(d, repo)} git.init(**k) git.config('user.email', '[email protected]', **k) - git.config('user.name', six.u('Loïc Nordhøy'), **k) + git.config('user.name', 'Loïc Nordhøy', **k) touch('README.md', **k) git.add('README.md', **k) git.commit('-m', 'README', 'README.md', **k) @@ -315,9 +314,9 @@ def r(): ]) assert 'l10n: updated Dutch (nl)' not in r() assert 'l10n: updated German (de_DE)' not in r() - message = six.text_type(git('--no-pager', '-C', 'securedrop', 'show', - _cwd=d, _encoding='utf-8')) - assert six.u("Loïc") in message + message = str(git('--no-pager', '-C', 'securedrop', 'show', + _cwd=d, _encoding='utf-8')) + assert "Loïc" in message # # an update is done to nl in weblate @@ -348,7 +347,7 @@ def r(): ]) assert 'l10n: updated Dutch (nl)' in r() assert 'l10n: updated German (de_DE)' not in r() - message = six.text_type(git('--no-pager', '-C', 'securedrop', 'show', - _cwd=d)) + message = str(git('--no-pager', '-C', 'securedrop', 'show', + _cwd=d)) assert "Someone Else" in message - assert six.u("Loïc") not in message + assert "Loïc" not in message diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -10,8 +10,6 @@ from distutils.version 
import StrictVersion from io import BytesIO -import six - import mock import pytest from bs4 import BeautifulSoup @@ -149,7 +147,7 @@ def assertion(): def test_submit_file(source_app, journalist_app, test_journo): """When a source creates an account, test that a new entry appears in the journalist interface""" - test_file_contents = six.b("This is a test file.") + test_file_contents = b"This is a test file." test_filename = "test.txt" with source_app.test_client() as app: @@ -159,7 +157,7 @@ def test_submit_file(source_app, journalist_app, test_journo): # redirected to submission form resp = app.post('/submit', data=dict( msg="", - fh=(six.BytesIO(test_file_contents), test_filename), + fh=(BytesIO(test_file_contents), test_filename), ), follow_redirects=True) assert resp.status_code == 200 app.get('/logout') @@ -193,7 +191,7 @@ def test_submit_file(source_app, journalist_app, test_journo): decrypted_data = journalist_app.crypto_util.gpg.decrypt(resp.data) assert decrypted_data.ok - sio = six.BytesIO(decrypted_data.data) + sio = BytesIO(decrypted_data.data) with gzip.GzipFile(mode='rb', fileobj=sio) as gzip_file: unzipped_decrypted_data = gzip_file.read() mtime = gzip_file.mtime @@ -261,7 +259,7 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, # redirected to submission form resp = app.post('/submit', data=dict( msg=test_msg, - fh=(six.BytesIO(six.b('')), ''), + fh=(BytesIO(b''), ''), ), follow_redirects=True) assert resp.status_code == 200 assert not g.source.flagged @@ -339,7 +337,7 @@ def assertion(): ), follow_redirects=True) assert resp.status_code == 200 - zf = zipfile.ZipFile(six.BytesIO(resp.data), 'r') + zf = zipfile.ZipFile(BytesIO(resp.data), 'r') data = zf.read(zf.namelist()[0]) _can_decrypt_with_key(journalist_app, data) _can_decrypt_with_key( @@ -465,7 +463,7 @@ def test_unicode_reply_with_ansi_env(journalist_app, journalist_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" source_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" _helper_test_reply(journalist_app, source_app, config, test_journo, - six.u("ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ"), True) + "ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ", True) def test_delete_collection(mocker, source_app, journalist_app, test_journo): diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -2,7 +2,6 @@ import os import pytest import io -import six import random import zipfile import base64 @@ -1284,12 +1283,8 @@ def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): "error") log_event = mocked_error_logger.call_args[0][0] - if six.PY2: - assert ("Adding user 'username' failed: (__builtin__.NoneType) " - "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event - else: - assert ("Adding user 'username' failed: (builtins.NoneType) " - "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event + assert ("Adding user 'username' failed: (builtins.NoneType) " + "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin): diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py --- a/securedrop/tests/test_secure_tempfile.py +++ b/securedrop/tests/test_secure_tempfile.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- import io import os -import six import pytest from pretty_bad_protocol._util import _is_stream @@ -51,7 +50,7 @@ def test_write_then_read_then_write(): def 
test_read_write_unicode(): f = SecureTemporaryFile('/tmp') - unicode_msg = six.u('鬼神 Kill Em All 1989') + unicode_msg = '鬼神 Kill Em All 1989' f.write(unicode_msg) assert f.read().decode('utf-8') == unicode_msg diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -2,10 +2,9 @@ import gzip import re import subprocess -import six import time -from io import BytesIO +from io import BytesIO, StringIO from flask import session, escape, current_app, url_for, g from mock import patch, ANY @@ -296,7 +295,7 @@ def _dummy_submission(app): """ return app.post( url_for('main.submit'), - data=dict(msg=six.u("Pay no attention to the man behind the curtain."), + data=dict(msg="Pay no attention to the man behind the curtain.", fh=(BytesIO(b''), '')), follow_redirects=True) @@ -321,7 +320,7 @@ def test_submit_message(source_app): _dummy_submission(app) resp = app.post( url_for('main.submit'), - data=dict(msg=six.u("This is a test."), fh=(six.StringIO(six.u('')), '')), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') @@ -333,7 +332,7 @@ def test_submit_empty_message(source_app): new_codename(app, session) resp = app.post( url_for('main.submit'), - data=dict(msg="", fh=(six.StringIO(six.u('')), '')), + data=dict(msg="", fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') @@ -352,7 +351,7 @@ def test_submit_big_message(source_app): _dummy_submission(app) resp = app.post( url_for('main.submit'), - data=dict(msg="AA" * (1024 * 512), fh=(six.StringIO(six.u('')), '')), + data=dict(msg="AA" * (1024 * 512), fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') @@ -398,7 +397,7 @@ def test_submit_message_with_low_entropy(source_app): _dummy_submission(app) resp = app.post( url_for('main.submit'), - data=dict(msg="This is a test.", fh=(six.StringIO(six.u('')), '')), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 assert not async_genkey.called @@ -415,7 +414,7 @@ def test_submit_message_with_enough_entropy(source_app): _dummy_submission(app) resp = app.post( url_for('main.submit'), - data=dict(msg="This is a test.", fh=(six.StringIO(six.u('')), '')), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 assert async_genkey.called @@ -592,7 +591,7 @@ def test_failed_normalize_timestamps_logs_warning(source_app): url_for('main.submit'), data=dict( msg="This is a test.", - fh=(six.StringIO(six.u('')), '')), + fh=(StringIO(''), '')), follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') diff --git a/securedrop/tests/utils/instrument.py b/securedrop/tests/utils/instrument.py --- a/securedrop/tests/utils/instrument.py +++ b/securedrop/tests/utils/instrument.py @@ -9,7 +9,7 @@ """ -from six.moves.urllib.parse import urlparse, urljoin +from urllib.parse import urlparse, urljoin import pytest
remove Python 2 code paths in server code

## Description

If no major issues are found with the Python 2 to 3 transition during SecureDrop 1.0.0 QA, let's remove:

- [ ] the `PYTHON_VERSION` env var
- [ ] the Python 2 dockerfile
- [ ] the Python 2 application test CI job (and the Python 2-only DLC steps!)
- [ ] any Python 2-only logic (e.g. all the `six` callouts) from the server-side code _only_
- [ ] any developer docs referring to the above, or to the code needing to be both Python 2 and Python 3 compliant

Keep in mind that we still need to complete the migration of securedrop-admin to Python 3, which we'll do in the next regular release after 1.0.0 (i.e. 1.1.0). However, the only function that is used during the staging provisioning process is both Python 2 and 3 compliant, so removing the Python 2 env var in the server code should not cause issues.
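The removal is largely mechanical. As a hedged illustration (a hypothetical module, not code from the SecureDrop tree), the typical `six` shims and their Python 3-only replacements in the diff above look like this:

```python
# Hypothetical before/after sketch of the six-removal pattern; none of
# these names come from the SecureDrop codebase.

# Before (Python 2/3 compatible):
#   import six
#   if isinstance(data, six.text_type):
#       data = data.encode('utf-8')
#   greeting = six.u('Loïc')
#   payload = six.b('This is a test file.')

# After (Python 3 only):
def to_bytes(data):
    # six.text_type is simply str on Python 3
    if isinstance(data, str):
        data = data.encode('utf-8')
    return data

greeting = 'Loïc'                   # six.u(...) becomes a plain str literal
payload = b'This is a test file.'   # six.b(...) becomes a bytes literal

assert to_bytes(greeting) == greeting.encode('utf-8')
assert to_bytes(payload) == payload
```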
2019-09-23T22:08:13Z
[]
[]
freedomofpress/securedrop
4,865
freedomofpress__securedrop-4865
[ "4693" ]
da6223229c6aa71d96aed8f4e6a152afed288b19
diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py --- a/install_files/ansible-base/callback_plugins/ansible_version_check.py +++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py @@ -21,7 +21,7 @@ class CallbackModule(CallbackBase): def __init__(self): # Can't use `on_X` because this isn't forwards compatible # with Ansible 2.0+ - required_version = '2.6.14' # Keep synchronized with requirements files + required_version = '2.6.19' # Keep synchronized with requirements files if not ansible.__version__.startswith(required_version): print_red_bold( "SecureDrop restriction: only Ansible {version}.*"
diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst --- a/docs/development/testing_configuration_tests.rst +++ b/docs/development/testing_configuration_tests.rst @@ -14,7 +14,7 @@ Installation .. code:: sh - pip install -r securedrop/requirements/develop-requirements.txt + pip install --require-hashes -r securedrop/requirements/python3/develop-requirements.txt Running the Config Tests diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -8,13 +8,13 @@ @pytest.mark.parametrize('package', [ 'tor', ]) -def test_tor_packages(Package, package): +def test_tor_packages(host, package): """ Ensure Tor packages are installed. Does not include the Tor keyring package, since we want only the SecureDrop Release Signing Key to be used even for Tor packages. """ - assert Package(package).is_installed + assert host.package(package).is_installed def test_tor_service_running(host): diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -52,7 +52,7 @@ def test_generic_kernels_absent(host, package): c = host.run("dpkg -l {}".format(package)) assert c.rc == 1 error_text = "dpkg-query: no packages found matching {}".format(package) - assert c.stderr == error_text + assert error_text in c.stderr.strip() def test_grsecurity_lock_file(host): @@ -71,8 +71,8 @@ def test_grsecurity_kernel_is_running(host): Make sure the currently running kernel is specific grsec kernel. """ c = host.run('uname -r') - assert c.stdout.endswith('-grsec') - assert c.stdout == '{}-grsec'.format(KERNEL_VERSION) + assert c.stdout.strip().endswith('-grsec') + assert c.stdout.strip() == '{}-grsec'.format(KERNEL_VERSION) @pytest.mark.parametrize('sysctl_opt', [ @@ -130,7 +130,7 @@ def test_grub_pc_marked_manual(host): """ c = host.run('apt-mark showmanual grub-pc') assert c.rc == 0 - assert c.stdout == "grub-pc" + assert c.stdout.strip() == "grub-pc" def test_apt_autoremove(host): diff --git a/molecule/testinfra/staging/common/test_tor_mirror.py b/molecule/testinfra/staging/common/test_tor_mirror.py --- a/molecule/testinfra/staging/common/test_tor_mirror.py +++ b/molecule/testinfra/staging/common/test_tor_mirror.py @@ -28,7 +28,7 @@ def test_tor_keyring_absent(host): c = host.run("dpkg -l {}".format(package)) assert c.rc == 1 error_text = "dpkg-query: no packages found matching {}".format(package) - assert c.stderr.rstrip() == error_text + assert error_text in c.stderr.strip() @pytest.mark.parametrize('tor_key_info', [ diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/staging/common/test_user_config.py @@ -65,7 +65,7 @@ def test_sudoers_tmux_env(host): (tmux attach || tmux_attach_via_proc || tmux new-session) fi""" ) - assert host_file.content_string == expected_content + assert host_file.content_string.strip() == expected_content def test_tmux_installed(host):
update ansible to 2.6.18 or later due to CVE-2019-10156

## Description

We should update Ansible to version 2.6.18 or later due to [CVE-2019-10156](https://nvd.nist.gov/vuln/detail/CVE-2019-10156). This is a templating vulnerability that would require an attacker to first insert malicious templates into the Admin Workstation, so the impact on SecureDrop is minimal. Nevertheless, to reduce alert noise and avoid using dependencies with known vulnerabilities, we should update.
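The callback plugin in the patch above gates playbook runs on a pinned Ansible version. A minimal standalone sketch of that gate (simplified — the real plugin subclasses Ansible's `CallbackBase`; the pinned prefix here is illustrative):

```python
# Minimal sketch of the version gate applied by the callback plugin.
# Assumes the `ansible` package is importable; the required prefix is
# illustrative and must be kept in sync with the requirements files.
import ansible

REQUIRED_PREFIX = '2.6.19'

if not ansible.__version__.startswith(REQUIRED_PREFIX):
    raise SystemExit(
        'SecureDrop restriction: only Ansible {}.* is supported '
        '(found {}).'.format(REQUIRED_PREFIX, ansible.__version__)
    )
```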
2019-09-25T14:30:10Z
[]
[]
freedomofpress/securedrop
4,867
freedomofpress__securedrop-4867
[ "3489" ]
94e4a9d1352b919e90e407a518f6c88a50fa22f8
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -26,7 +26,7 @@ sdlog = logging.getLogger(__name__) DIR = os.path.dirname(os.path.realpath(__file__)) -VENV_DIR = os.path.join(DIR, ".venv") +VENV_DIR = os.path.join(DIR, ".venv3") def setup_logger(verbose=False): @@ -52,7 +52,7 @@ def run_command(command): popen = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - for stdout_line in iter(popen.stdout.readline, ""): + for stdout_line in iter(popen.stdout.readline, b""): yield stdout_line popen.stdout.close() return_code = popen.wait() @@ -94,21 +94,21 @@ def install_apt_dependencies(args): apt_command = ['sudo', 'su', '-c', "apt-get update && \ apt-get -q -o=Dpkg::Use-Pty=0 install -y \ - python-virtualenv \ - python-yaml \ - python-pip \ + python3-virtualenv \ + python3-yaml \ + python3-pip \ ccontrol \ virtualenv \ libffi-dev \ libssl-dev \ - libpython2.7-dev", + libpython3-dev", ] try: # Print command results in real-time, to keep Admin apprised # of progress during long-running command. for output_line in run_command(apt_command): - print(output_line.rstrip()) + print(output_line.decode('utf-8').rstrip()) except subprocess.CalledProcessError: # Tails supports apt persistence, which was used by SecureDrop # under Tails 2.x. If updates are being applied, don't try to pile @@ -142,7 +142,7 @@ def envsetup(args): sdlog.info("Setting up virtualenv") try: sdlog.debug(subprocess.check_output( - maybe_torify() + ['virtualenv', VENV_DIR], + maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR], stderr=subprocess.STDOUT)) except subprocess.CalledProcessError as e: sdlog.debug(e.output) @@ -161,7 +161,7 @@ def envsetup(args): def install_pip_self(args): pip_install_cmd = [ - os.path.join(VENV_DIR, 'bin', 'pip'), + os.path.join(VENV_DIR, 'bin', 'pip3'), 'install', '-e', DIR ] try: @@ -174,7 +174,7 @@ def install_pip_self(args): def install_pip_dependencies(args, pip_install_cmd=[ - os.path.join(VENV_DIR, 'bin', 'pip'), + os.path.join(VENV_DIR, 'bin', 'pip3'), 'install', # Specify requirements file. '-r', os.path.join(DIR, 'requirements.txt'), @@ -197,7 +197,7 @@ def install_pip_dependencies(args, pip_install_cmd=[ raise sdlog.debug(pip_output) - if "Successfully installed" in pip_output: + if "Successfully installed" in str(pip_output): sdlog.info("Python dependencies for securedrop-admin upgraded") else: sdlog.info("Python dependencies for securedrop-admin are up-to-date") diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -23,16 +23,13 @@ instances. 
""" -from __future__ import print_function import argparse import logging import os import io import re -import string import subprocess import sys -import types import json import base64 import prompt_toolkit @@ -417,11 +414,11 @@ def __init__(self, args): SiteConfig.ValidateYesNo(), lambda x: x.lower() == 'yes', lambda config: True], - ['securedrop_supported_locales', [], types.ListType, + ['securedrop_supported_locales', [], list, u'Space separated list of additional locales to support ' '(' + translations + ')', SiteConfig.ValidateLocales(self.args.app_path), - string.split, + str.split, lambda config: True], ['v2_onion_services', self.check_for_v2_onion(), bool, u'Do you want to enable v2 onion services (recommended only for SecureDrop instances installed before 1.0.0)?', # noqa: E501 @@ -526,7 +523,7 @@ def validated_input(self, prompt, default, validator, transform): default = default and 'yes' or 'no' if type(default) is int: default = str(default) - if isinstance(default, types.ListType): + if isinstance(default, list): default = " ".join(default) if type(default) is not str: default = str(default) @@ -534,7 +531,7 @@ def validated_input(self, prompt, default, validator, transform): if validator: kwargs['validator'] = validator value = prompt_toolkit.prompt(prompt, - default=default.decode('utf-8'), + default=default, **kwargs) if transform: return transform(value) @@ -777,7 +774,7 @@ def check_for_updates(args): # Determine what branch we are on current_tag = subprocess.check_output(['git', 'describe'], - cwd=args.root).rstrip('\n') + cwd=args.root).decode('utf-8').rstrip('\n') # noqa: E501 # Fetch all branches git_fetch_cmd = ['git', 'fetch', '--all'] @@ -786,7 +783,7 @@ def check_for_updates(args): # Get latest tag git_all_tags = ["git", "tag"] all_tags = subprocess.check_output(git_all_tags, - cwd=args.root).rstrip('\n').split('\n') + cwd=args.root).decode('utf-8').rstrip('\n').split('\n') # noqa: E501 # Do not check out any release candidate tags all_prod_tags = [x for x in all_tags if 'rc' not in x] @@ -837,7 +834,7 @@ def update(args): try: sig_result = subprocess.check_output(git_verify_tag_cmd, stderr=subprocess.STDOUT, - cwd=args.root) + cwd=args.root).decode('utf-8') good_sig_text = ['Good signature from "SecureDrop Release Signing ' + 'Key"', @@ -869,7 +866,7 @@ def update(args): sdlog.info("Signature verification failed.") return 1 except subprocess.CalledProcessError as e: - if 'not a valid ref' in e.output: + if 'not a valid ref' in e.output.decode('utf-8'): # Then there is no duplicate branch. sdlog.info("Signature verification successful.") else: # If any other exception occurs, we bail. diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py --- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py +++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 import grp import os @@ -22,7 +22,7 @@ path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/' # noqa: E501 path_securedrop_root = '/home/amnesia/Persistent/securedrop' path_securedrop_admin_venv = os.path.join(path_securedrop_root, - 'admin/.venv/bin/python') + 'admin/.venv3/bin/python') path_securedrop_admin_init = os.path.join(path_securedrop_root, 'admin/securedrop_admin/__init__.py') path_gui_updater = os.path.join(path_securedrop_root,
diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -197,137 +197,137 @@ def teardown_function(function): def verify_username_prompt(child): - child.expect("Username for SSH access to the servers:") + child.expect(b"Username for SSH access to the servers:") def verify_reboot_prompt(child): child.expect( - r"Daily reboot time of the server \(24\-hour clock\):", timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' 4' # Expected default + rb"Daily reboot time of the server \(24\-hour clock\):", timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '4' # noqa: E501 def verify_ipv4_appserver_prompt(child): - child.expect(r'Local IPv4 address for the Application Server\:', timeout=2) + child.expect(rb'Local IPv4 address for the Application Server\:', timeout=2) # noqa: E501 # Expected default - assert ANSI_ESCAPE.sub('', child.buffer) == ' 10.20.2.2' + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '10.20.2.2' # noqa: E501 def verify_ipv4_monserver_prompt(child): - child.expect(r'Local IPv4 address for the Monitor Server\:', timeout=2) + child.expect(rb'Local IPv4 address for the Monitor Server\:', timeout=2) # Expected default - assert ANSI_ESCAPE.sub('', child.buffer) == ' 10.20.3.2' + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '10.20.3.2' # noqa: E501 def verify_hostname_app_prompt(child): - child.expect(r'Hostname for Application Server\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' app' # Expected default + child.expect(rb'Hostname for Application Server\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'app' # noqa: E501 def verify_hostname_mon_prompt(child): - child.expect(r'Hostname for Monitor Server\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' mon' # Expected default + child.expect(rb'Hostname for Monitor Server\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'mon' # noqa: E501 def verify_dns_prompt(child): - child.expect(r'DNS server specified during installation\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' 8.8.8.8' # Expected default + child.expect(rb'DNS server specified during installation\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '8.8.8.8' # noqa: E501 def verify_app_gpg_key_prompt(child): - child.expect('Local filepath to public key for SecureDrop Application GPG public key\:', timeout=2) # noqa: E501 + child.expect(rb'Local filepath to public key for SecureDrop Application GPG public key\:', timeout=2) # noqa: E501 def verify_https_prompt(child): - child.expect('Whether HTTPS should be enabled on Source Interface \(requires EV cert\)\:', timeout=2) # noqa: E501 + child.expect(rb'Whether HTTPS should be enabled on Source Interface \(requires EV cert\)\:', timeout=2) # noqa: E501 def verify_https_cert_prompt(child): - child.expect(r'Local filepath to HTTPS certificate\:', timeout=2) + child.expect(rb'Local filepath to HTTPS certificate\:', timeout=2) def verify_https_cert_key_prompt(child): - child.expect(r'Local filepath to HTTPS certificate key\:', timeout=2) + child.expect(rb'Local filepath to HTTPS certificate key\:', timeout=2) def verify_https_cert_chain_file_prompt(child): - child.expect(r'Local filepath to HTTPS certificate chain file\:', timeout=2) # noqa: E501 + child.expect(rb'Local filepath to HTTPS certificate chain 
file\:', timeout=2) # noqa: E501 def verify_app_gpg_fingerprint_prompt(child): - child.expect('Full fingerprint for the SecureDrop Application GPG Key\:', timeout=2) # noqa: E501 + child.expect(rb'Full fingerprint for the SecureDrop Application GPG Key\:', timeout=2) # noqa: E501 def verify_ossec_gpg_key_prompt(child): - child.expect('Local filepath to OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 + child.expect(rb'Local filepath to OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 def verify_ossec_gpg_fingerprint_prompt(child): - child.expect('Full fingerprint for the OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 + child.expect(rb'Full fingerprint for the OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 def verify_admin_email_prompt(child): - child.expect('Admin email address for receiving OSSEC alerts\:', timeout=2) # noqa: E501 + child.expect(rb'Admin email address for receiving OSSEC alerts\:', timeout=2) # noqa: E501 def verify_journalist_gpg_key_prompt(child): - child.expect('Local filepath to journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 + child.expect(rb'Local filepath to journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 def verify_journalist_fingerprint_prompt(child): - child.expect('Full fingerprint for the journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 + child.expect(rb'Full fingerprint for the journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 def verify_journalist_email_prompt(child): - child.expect('Email address for receiving journalist alerts \(optional\)\:', timeout=2) # noqa: E501 + child.expect(rb'Email address for receiving journalist alerts \(optional\)\:', timeout=2) # noqa: E501 def verify_smtp_relay_prompt(child): - child.expect(r'SMTP relay for sending OSSEC alerts\:', timeout=2) + child.expect(rb'SMTP relay for sending OSSEC alerts\:', timeout=2) # Expected default - assert ANSI_ESCAPE.sub('', child.buffer) == ' smtp.gmail.com' + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'smtp.gmail.com' # noqa: E501 def verify_smtp_port_prompt(child): - child.expect(r'SMTP port for sending OSSEC alerts\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' 587' # Expected default + child.expect(rb'SMTP port for sending OSSEC alerts\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '587' # noqa: E501 def verify_sasl_domain_prompt(child): - child.expect(r'SASL domain for sending OSSEC alerts\:', timeout=2) + child.expect(rb'SASL domain for sending OSSEC alerts\:', timeout=2) # Expected default - assert ANSI_ESCAPE.sub('', child.buffer) == ' gmail.com' + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'gmail.com' # noqa: E501 def verify_sasl_username_prompt(child): - child.expect(r'SASL username for sending OSSEC alerts\:', timeout=2) + child.expect(rb'SASL username for sending OSSEC alerts\:', timeout=2) def verify_sasl_password_prompt(child): - child.expect(r'SASL password for sending OSSEC alerts\:', timeout=2) + child.expect(rb'SASL password for sending OSSEC alerts\:', timeout=2) def verify_ssh_over_lan_prompt(child): - child.expect(r'will be available over LAN only\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default + child.expect(rb'will be available over LAN only\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 def verify_locales_prompt(child): - child.expect('Space 
separated list of additional locales to support') # noqa: E501 + child.expect(rb'Space separated list of additional locales to support') # noqa: E501 def verify_v2_onion_for_first_time(child): - child.expect(r' installed before 1.0.0\)\?\:', timeout=2) # noqa: E501 - assert ANSI_ESCAPE.sub('', child.buffer) == ' no' # Expected default + child.expect(rb' installed before 1.0.0\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'no' # noqa: E501 def verify_v3_onion_for_first_time(child): - child.expect(r'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 - assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default + child.expect(rb'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 def verify_v3_onion_when_v2_is_enabled(child): - child.expect(r'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 - assert ANSI_ESCAPE.sub('', child.buffer) == ' yes' # Expected default + child.expect(rb'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 def test_sdconfig_on_first_run(): @@ -709,7 +709,7 @@ def set_reliable_keyserver(gpgdir): f.write('keyserver hkps://keys.openpgp.org') # Ensure correct permissions on .gnupg home directory. - os.chmod(gpgdir, 0o700) + os.chmod(gpgdir, 0o0700) @flaky(max_runs=3) @@ -721,7 +721,7 @@ def test_check_for_update_when_updates_needed(securedrop_git_repo): fullcmd = 'coverage run {0} --root {1} check_for_updates'.format( cmd, ansible_base) child = pexpect.spawn(fullcmd) - child.expect('Update needed', timeout=20) + child.expect(b'Update needed', timeout=20) child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit child.close() @@ -745,7 +745,7 @@ def test_check_for_update_when_updates_not_needed(securedrop_git_repo): fullcmd = 'coverage run {0} --root {1} check_for_updates'.format( cmd, ansible_base) child = pexpect.spawn(fullcmd) - child.expect('All updates applied', timeout=20) + child.expect(b'All updates applied', timeout=20) child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit child.close() @@ -766,8 +766,8 @@ def test_update(securedrop_git_repo): cmd, ansible_base)) output = child.read() - assert 'Updated to SecureDrop' in output - assert 'Signature verification successful' in output + assert b'Updated to SecureDrop' in output + assert b'Signature verification successful' in output child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit child.close() @@ -796,8 +796,8 @@ def test_update_fails_when_no_signature_present(securedrop_git_repo): child = pexpect.spawn('coverage run {0} --root {1} update'.format( cmd, ansible_base)) output = child.read() - assert 'Updated to SecureDrop' not in output - assert 'Signature verification failed' in output + assert b'Updated to SecureDrop' not in output + assert b'Signature verification failed' in output child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit child.close() @@ -830,9 +830,9 @@ def test_update_with_duplicate_branch_and_tag(securedrop_git_repo): cmd, ansible_base)) output = child.read() # Verify that we do not falsely check out a branch instead of a tag. 
- assert 'Switched to branch' not in output - assert 'Updated to SecureDrop' not in output - assert 'Signature verification failed' in output + assert b'Switched to branch' not in output + assert b'Updated to SecureDrop' not in output + assert b'Signature verification failed' in output child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit child.close() diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py --- a/admin/tests/test_securedrop-admin-setup.py +++ b/admin/tests/test_securedrop-admin-setup.py @@ -43,7 +43,7 @@ def test_not_verbose(self, capsys): def test_run_command(self): for output_line in bootstrap.run_command( ['/bin/echo', 'something']): - assert output_line.strip() == 'something' + assert output_line.strip() == b'something' lines = [] with pytest.raises(subprocess.CalledProcessError): @@ -51,8 +51,8 @@ def test_run_command(self): ['sh', '-c', 'echo in stdout ; echo in stderr >&2 ; false']): lines.append(output_line.strip()) - assert lines[0] == 'in stdout' - assert lines[1] == 'in stderr' + assert lines[0] == b'in stdout' + assert lines[1] == b'in stderr' def test_install_pip_dependencies_up_to_date(self, caplog): args = argparse.Namespace() diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -26,7 +26,6 @@ import mock from prompt_toolkit.validation import ValidationError import pytest -import string import subprocess import textwrap import yaml @@ -59,8 +58,8 @@ def test_not_verbose(self, capsys): def test_check_for_updates_update_needed(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - current_tag = "0.6" - tags_available = "0.6\n0.6-rc1\n0.6.1\n" + current_tag = b"0.6" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" with mock.patch('subprocess.check_call'): with mock.patch('subprocess.check_output', @@ -73,8 +72,8 @@ def test_check_for_updates_update_needed(self, tmpdir, caplog): def test_check_for_updates_higher_version(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - current_tag = "0.6" - tags_available = "0.1\n0.10.0\n0.6.2\n0.6\n0.6-rc1\n0.9.0\n" + current_tag = b"0.6" + tags_available = b"0.1\n0.10.0\n0.6.2\n0.6\n0.6-rc1\n0.9.0\n" with mock.patch('subprocess.check_call'): with mock.patch('subprocess.check_output', @@ -88,8 +87,8 @@ def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog): """Regression test for #3426""" git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - current_tag = "0.6.1\n" - tags_available = "0.6\n0.6-rc1\n0.6.1\n" + current_tag = b"0.6.1\n" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" with mock.patch('subprocess.check_call'): with mock.patch('subprocess.check_output', @@ -102,8 +101,8 @@ def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog): def test_check_for_updates_update_not_needed(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - current_tag = "0.6.1" - tags_available = "0.6\n0.6-rc1\n0.6.1\n" + current_tag = b"0.6.1" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" with mock.patch('subprocess.check_call'): with mock.patch('subprocess.check_output', @@ -118,8 +117,8 @@ def test_check_for_updates_if_most_recent_tag_is_rc(self, tmpdir, caplog): verify that users will not accidentally check out this tag.""" git_repo_path = str(tmpdir) args = 
argparse.Namespace(root=git_repo_path) - current_tag = "0.6.1" - tags_available = "0.6\n0.6-rc1\n0.6.1\n0.6.1-rc1\n" + current_tag = b"0.6.1" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n0.6.1-rc1\n" with mock.patch('subprocess.check_call'): with mock.patch('subprocess.check_output', @@ -152,31 +151,31 @@ def test_get_release_key_from_valid_keyserver(self, tmpdir, caplog): args, keyserver='test.com') @pytest.mark.parametrize("git_output", - ['gpg: Signature made Tue 13 Mar ' - '2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - '22245C81E3BAEB4138B36061310F561200F4AD77\n' - 'gpg: Good signature from "SecureDrop Release ' - 'Signing Key" [unknown]\n', - - 'gpg: Signature made Thu 20 Jul ' - '2017 08:12:25 PM EDT\n' - 'gpg: using RSA key ' - '22245C81E3BAEB4138B36061310F561200F4AD77\n' - 'gpg: Good signature from "SecureDrop Release ' - 'Signing Key ' - '<[email protected]>"\n', - - 'gpg: Signature made Thu 20 Jul ' - '2017 08:12:25 PM EDT\n' - 'gpg: using RSA key ' - '22245C81E3BAEB4138B36061310F561200F4AD77\n' - 'gpg: Good signature from "SecureDrop Release ' - 'Signing Key" [unknown]\n' - 'gpg: aka "SecureDrop Release ' - 'Signing Key ' - '<[email protected]>" ' - '[unknown]\n']) + [b'gpg: Signature made Tue 13 Mar ' + b'2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n', + + b'gpg: Signature made Thu 20 Jul ' + b'2017 08:12:25 PM EDT\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key ' + b'<[email protected]>"\n', + + b'gpg: Signature made Thu 20 Jul ' + b'2017 08:12:25 PM EDT\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n' + b'gpg: aka "SecureDrop Release ' + b'Signing Key ' + b'<[email protected]>" ' + b'[unknown]\n']) def test_update_signature_verifies(self, tmpdir, caplog, git_output): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) @@ -188,7 +187,7 @@ def test_update_signature_verifies(self, tmpdir, caplog, git_output): side_effect=[ git_output, subprocess.CalledProcessError(1, 'cmd', - 'not a valid ref')]), + b'not a valid ref')]), ] for patcher in patchers: @@ -208,11 +207,11 @@ def test_update_unexpected_exception_git_refs(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - '22245C81E3BAEB4138B36061310F561200F4AD77\n' - 'gpg: Good signature from "SecureDrop Release ' - 'Signing Key" [unknown]\n') + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n') patchers = [ mock.patch('securedrop_admin.check_for_updates', @@ -222,7 +221,7 @@ def test_update_unexpected_exception_git_refs(self, tmpdir, caplog): side_effect=[ git_output, subprocess.CalledProcessError(1, 'cmd', - 'a random error')]), + b'a random error')]), ] for patcher in patchers: @@ -242,11 +241,11 @@ def test_update_signature_does_not_verify(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - 
'22245C81E3BAEB4138B36061310F561200F4AD77\n' - 'gpg: BAD signature from "SecureDrop Release ' - 'Signing Key" [unknown]\n') + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: BAD signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n') with mock.patch('securedrop_admin.check_for_updates', return_value=(True, "0.6.1")): @@ -263,11 +262,11 @@ def test_update_malicious_key_named_fingerprint(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - '1234567812345678123456781234567812345678\n' - 'gpg: Good signature from "22245C81E3BAEB4138' - 'B36061310F561200F4AD77" [unknown]\n') + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from "22245C81E3BAEB4138' + b'B36061310F561200F4AD77" [unknown]\n') with mock.patch('securedrop_admin.check_for_updates', return_value=(True, "0.6.1")): @@ -284,11 +283,11 @@ def test_update_malicious_key_named_good_sig(self, tmpdir, caplog): git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - '1234567812345678123456781234567812345678\n' - 'gpg: Good signature from Good signature from ' - '"SecureDrop Release Signing Key" [unknown]\n') + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from Good signature from ' + b'"SecureDrop Release Signing Key" [unknown]\n') with mock.patch('securedrop_admin.check_for_updates', return_value=(True, "0.6.1")): @@ -306,12 +305,12 @@ def test_update_malicious_key_named_good_sig_fingerprint(self, tmpdir, git_repo_path = str(tmpdir) args = argparse.Namespace(root=git_repo_path) - git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' - 'gpg: using RSA key ' - '1234567812345678123456781234567812345678\n' - 'gpg: Good signature from 22245C81E3BAEB4138' - 'B36061310F561200F4AD77 Good signature from ' - '"SecureDrop Release Signing Key" [unknown]\n') + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from 22245C81E3BAEB4138' + b'B36061310F561200F4AD77 Good signature from ' + b'"SecureDrop Release Signing Key" [unknown]\n') with mock.patch('securedrop_admin.check_for_updates', return_value=(True, "0.6.1")): @@ -438,7 +437,7 @@ def test_validate_ossec_email(self): assert validator.validate(Document('[email protected]')) with pytest.raises(ValidationError) as e: validator.validate(Document('[email protected]')) - assert 'something other than [email protected]' in e.value.message + assert 'something other than [email protected]' in str(e) def test_validate_optional_email(self): validator = securedrop_admin.SiteConfig.ValidateOptionalEmail() @@ -555,22 +554,22 @@ def test_validate_fingerprint(self): with pytest.raises(ValidationError) as e: validator.validate(Document( "65A1B5FF195B56353CC63DFFCC40EF1228271441")) - assert 'TEST journalist' in e.value.message + assert 'TEST journalist' in str(e) with pytest.raises(ValidationError) as e: validator.validate(Document( "600BC6D5142C68F35DDBCEA87B597104EDDDC102")) - 
assert 'TEST admin' in e.value.message + assert 'TEST admin' in str(e) with pytest.raises(ValidationError) as e: validator.validate(Document( "0000")) - assert '40 hexadecimal' in e.value.message + assert '40 hexadecimal' in str(e) with pytest.raises(ValidationError) as e: validator.validate(Document( "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz")) - assert '40 hexadecimal' in e.value.message + assert '40 hexadecimal' in str(e) def test_validate_optional_fingerprint(self): validator = securedrop_admin.SiteConfig.ValidateOptionalFingerprint() @@ -603,7 +602,7 @@ def test_validate_locales(self): assert validator.validate(Document('en_US fr_FR ')) with pytest.raises(ValidationError) as e: validator.validate(Document('BAD')) - assert 'BAD' in e.value.message + assert 'BAD' in str(e) def test_save(self, tmpdir): site_config_path = join(str(tmpdir), 'site_config') @@ -654,7 +653,7 @@ def test_validate_gpg_key(self, caplog): site_config.config = bad_config with pytest.raises(securedrop_admin.FingerprintException) as e: site_config.validate_gpg_keys() - assert 'FAIL does not match' in e.value.message + assert 'FAIL does not match' in str(e) def test_journalist_alert_email(self): args = argparse.Namespace(site_config='INVALID', @@ -680,13 +679,13 @@ def test_journalist_alert_email(self): with pytest.raises( securedrop_admin.JournalistAlertEmailException) as e: site_config.validate_journalist_alert_email() - assert 'not be empty' in e.value.message + assert 'not be empty' in str(e) site_config.config['journalist_alert_email'] = 'bademail' with pytest.raises( securedrop_admin.JournalistAlertEmailException) as e: site_config.validate_journalist_alert_email() - assert 'Must contain a @' in e.value.message + assert 'Must contain a @' in str(e) site_config.config['journalist_alert_email'] = '[email protected]' assert site_config.validate_journalist_alert_email() @@ -952,7 +951,7 @@ def auto_prompt(prompt, default, **kwargs): assert value == site_config.validated_input( '', value, lambda: True, None) assert value.lower() == site_config.validated_input( - '', value, lambda: True, string.lower) + '', value, lambda: True, str.lower) assert 'yes' == site_config.validated_input( '', True, lambda: True, None) assert 'no' == site_config.validated_input(
Migrate securedrop-admin to Python 3

## Description

We need to migrate away from Python 2 entirely, as it EOLs in 2020. This ticket is to migrate the `securedrop-admin` CLI tool to Python 3. The good news is that Python 3 is in Tails already (indeed the SecureDrop GUI updater in Tails uses Python 3), so this is a migration that can be done relatively soon.
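A recurring change in the patch above is handling the bytes-vs-str split: under Python 3, `subprocess.check_output()` returns `bytes`, so output must be decoded before any string handling. A minimal sketch (assumes it is run inside a git checkout):

```python
# Under Python 3, check_output() returns bytes; decoding before string
# operations accounts for many of the .decode('utf-8') calls in the patch.
import subprocess

raw = subprocess.check_output(['git', 'describe'])   # bytes on Python 3
current_tag = raw.decode('utf-8').rstrip('\n')       # now a str
print(current_tag)
```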
I've been working on this issue and have the code, install scripts, and tests ported over to python3 ([which can be viewed here](https://github.com/freedomofpress/securedrop/compare/develop...kneitinger:3489-migrate-securedrop-admin-to-py3)), with all tests passing, but am curious whether some other considerations should be included in this.

- `prompt_toolkit` recently had an API-breaking update; would it be helpful to include that update as well?
- `bootstrap.py` seems like it could be easier to maintain and modify if it were an Ansible playbook; is there any specific reason the current design choice was made?

Edit: in thinking about it more, I feel it is favorable to have an install script that doesn't have any dependencies to run it.

Thanks @kneitinger. How much work do you think it will be to move to the newer `prompt_toolkit`?

I do not think it would be too much work at all to transition over to the new interface, but if I recall correctly my question was more about feeling out whether you folks have any internal processes around the package versions used.

After the tooling has been migrated to Py3, we'll have to handle the transition of the virtualenv on Admin Workstations from py2 -> py3 as well. Will open a separate issue to track.
2019-09-25T16:40:19Z
[]
[]
freedomofpress/securedrop
4,877
freedomofpress__securedrop-4877
[ "4387" ]
6cf415ebeb3f44d54f69562c3852f482af4c7a18
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py --- a/journalist_gui/journalist_gui/SecureDropUpdater.py +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -15,6 +15,15 @@ ESCAPE_POD = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') +def password_is_set(): + + pwd_flag = subprocess.check_output(['passwd', '--status']).decode('utf-8').split()[1] + + if pwd_flag == 'NP': + return False + return True + + def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # noqa # Null byte triggers abstract namespace @@ -27,7 +36,7 @@ def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # except OSError as e: if e.errno == ALREADY_BOUND_ERRNO: err_dialog = QtWidgets.QMessageBox() - err_dialog.setText(name + ' is already running.') + err_dialog.setText(name + strings.app_is_already_running) err_dialog.exec() sys.exit() else: @@ -280,11 +289,17 @@ def on_failure(self): self.progressBar.setProperty("value", 0) def update_securedrop(self): - self.pushButton_2.setEnabled(False) - self.pushButton.setEnabled(False) - self.progressBar.setProperty("value", 10) - self.update_status_bar_and_output(strings.fetching_update) - self.update_thread.start() + if password_is_set(): + self.pushButton_2.setEnabled(False) + self.pushButton.setEnabled(False) + self.progressBar.setProperty("value", 10) + self.update_status_bar_and_output(strings.fetching_update) + self.update_thread.start() + else: + self.pushButton_2.setEnabled(False) + pwd_err_dialog = QtWidgets.QMessageBox() + pwd_err_dialog.setText(strings.no_password_set_message) + pwd_err_dialog.exec() def alert_success(self): self.success_dialog = QtWidgets.QMessageBox() diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py --- a/journalist_gui/journalist_gui/strings.py +++ b/journalist_gui/journalist_gui/strings.py @@ -42,3 +42,7 @@ initial_text_box = ("When the update begins, this area will populate with " "output.\n") doing_setup = "Checking dependencies are up to date... (2 mins remaining)" +no_password_set_message = ("The Tails Administration Password was not set.\n\n" + "Please reboot and set a password before updating " + "SecureDrop.") +app_is_already_running = " is already running."
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py --- a/journalist_gui/test_gui.py +++ b/journalist_gui/test_gui.py @@ -250,6 +250,14 @@ def test_tails_status_failure(self): mock_remove.assert_not_called() self.assertEqual(self.window.progressBar.value(), 0) + @mock.patch('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') + def test_no_update_without_password(self, mock_msgbox): + with mock.patch('journalist_gui.SecureDropUpdater.password_is_set', + return_value=False): + self.window.update_securedrop() + self.assertEqual(self.window.pushButton.isEnabled(), True) + self.assertEqual(self.window.pushButton_2.isEnabled(), False) + if __name__ == '__main__': unittest.main()
Graphical updater re-appears, fails after initial run without admin password

## Description

The graphical updater fails to successfully complete an update if it is re-run after an update that failed due to a Tails admin password not being set.

## Steps to Reproduce

1. Boot up the Admin Workstation with an update pending.
2. Do _not_ set a Tails admin password.
3. Proceed with the update when prompted by the graphical updater.
4. Cancel the update and reboot the workstation at the step that requires a Tails admin password.
5. Proceed with the update when prompted by the graphical updater.

## Expected Behavior

I can successfully complete the update.

## Actual Behavior

The update fails ("Update failed").
Hey folks - any proposed workaround for this issue? Have a user reporting it in our Support Portal.

Yup, this is caused by a flag file that does not get removed when the updater fails in this scenario. The most straightforward workaround is to:

- delete the file `~amnesia/Persistent/.securedrop/securedrop_update.flag` on the Admin Workstation
- reboot Tails and start with the PV unlocked and an admin password set.

If the `git checkout` portion of the update completed successfully, the GUI updater will no longer appear. If you want to force it to run again (not necessary for releases that don't require a `setup` or `tailsconfig` update), open a terminal and run:

```
cd ~/Persistent/securedrop
git tag -v <previous release here>  # verify that tag is signed
git checkout <previous release here>
```

Then, bounce the network connection. The GUI updater will appear and should work correctly, prompting you for the Tails admin pw about halfway through.
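The eventual fix (see the patch above) guards the update behind a `passwd --status` check, whose second output field is `NP` when no password is set. A standalone sketch of that check (Linux-only; requires the `passwd` utility from shadow-utils):

```python
# Standalone sketch of the password check added in the patch above.
# `passwd --status` prints the account's password state; field 2 is
# 'NP' when no password is set.
import subprocess

def password_is_set():
    status = subprocess.check_output(['passwd', '--status']).decode('utf-8')
    return status.split()[1] != 'NP'

if not password_is_set():
    print('Set a Tails administration password before updating SecureDrop.')
```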
2019-09-27T18:40:48Z
[]
[]
freedomofpress/securedrop
4,914
freedomofpress__securedrop-4914
[ "4910", "4910" ]
b6c6d931deb0d4e36bc1cabd25b816beb2e58127
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -20,6 +20,7 @@ import argparse import logging import os +import shutil import subprocess import sys @@ -68,13 +69,48 @@ def is_tails(): id = None # dirty hack to unreliably detect Tails 4.0~beta2 - if id == 'Debian': + if id == b'Debian': if os.uname()[1] == 'amnesia': id = 'Tails' return id == 'Tails' +def clean_up_tails3_venv(virtualenv_dir=VENV_DIR): + """ + Tails 3.x, based on debian stretch uses libpython3.5, whereas Tails 4.x is + based on Debian Buster and uses libpython3.7. This means that the Tails 3.x + virtualenv will not work under Tails 4.x, and will need to be destroyed and + rebuilt. We can detect if the version of libpython is 3.5 in the + admin/.venv3/ folder, and delete it if that's the case. This will ensure a + smooth upgrade from Tails 3.x to Tails 4.x. + """ + if is_tails(): + try: + dist = subprocess.check_output('lsb_release --codename --short', + shell=True).strip() + except subprocess.CalledProcessError: + dist = None + + # tails4 is based on buster + if dist == b'buster': + python_lib_path = os.path.join(virtualenv_dir, "lib/python3.5") + if os.path.exists(os.path.join(python_lib_path)): + sdlog.info( + "Tails 3 Python 3 virtualenv detected. " + "Removing it." + ) + shutil.rmtree(virtualenv_dir) + sdlog.info("Tails 3 Python 3 virtualenv deleted.") + + +def checkenv(args): + clean_up_tails3_venv(VENV_DIR) + if not os.path.exists(os.path.join(VENV_DIR, "bin/activate")): + sdlog.error('Please run "securedrop-admin setup".') + sys.exit(1) + + def maybe_torify(): if is_tails(): return ['torify'] @@ -130,6 +166,9 @@ def envsetup(args): Ansible is available to the Admin on subsequent boots without requiring installation of packages again. """ + # clean up tails 3.x venv when migrating to tails 4.x + clean_up_tails3_venv(VENV_DIR) + # virtualenv doesnt exist? Install dependencies and create if not os.path.exists(VENV_DIR): @@ -209,18 +248,30 @@ def parse_argv(argv): help="Increase verbosity on output") parser.set_defaults(func=envsetup) + subparsers = parser.add_subparsers() + + envsetup_parser = subparsers.add_parser( + 'envsetup', + help='Set up the admin virtualenv.' + ) + envsetup_parser.set_defaults(func=envsetup) + + checkenv_parser = subparsers.add_parser( + 'checkenv', + help='Check that the admin virtualenv is properly set up.' + ) + checkenv_parser.set_defaults(func=checkenv) + return parser.parse_args(argv) if __name__ == "__main__": args = parse_argv(sys.argv[1:]) setup_logger(args.v) - if args.v: + + try: args.func(args) + except Exception: + sys.exit(1) else: - try: - args.func(args) - except Exception: - sys.exit(1) - else: - sys.exit(0) + sys.exit(0)
diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py --- a/admin/tests/test_securedrop-admin-setup.py +++ b/admin/tests/test_securedrop-admin-setup.py @@ -18,6 +18,8 @@ # import argparse +import mock +import os import pytest import subprocess @@ -74,3 +76,46 @@ def test_install_pip_dependencies_fail(self, caplog): assert 'Failed to install' in caplog.text assert 'in stdout' in caplog.text assert 'in stderr' in caplog.text + + def test_python3_stretch_venv_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(str(tmpdir), 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value=b"buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert 'Tails 3 Python 3 virtualenv detected.' in caplog.text + assert 'Tails 3 Python 3 virtualenv deleted.' in caplog.text + assert not os.path.exists(venv_path) + + def test_python3_buster_venv_not_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.7') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert ( + 'Tails 3 Python 3 virtualenv detected' not in caplog.text + ) + assert os.path.exists(venv_path) + + def test_python3_stretch_venv_not_deleted_in_stretch(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="stretch"): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path) + + def test_venv_cleanup_subprocess_exception(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError(1, + ':o')): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path)
securedrop-admin (setup) fails in Tails 4.0-rc1 ## Description Running `./securedrop-admin setup` in Tails 4.0-rc1 (upgraded from 3.16) returns the following error: ``` INFO: Virtualenv already exists, not creating INFO: Checking Python dependencies for securedrop-admin ERROR: Failed to install pip dependencies. Check network connection and try again. ``` This was done in VMs; will try to reproduce on hardware. ## Steps to Reproduce 1. Upgrade Tails device from 3.16 to 4.0 (boot into the 4.0-rc1 drive and clone to upgrade the 3.16 drive) 2. Boot into the newly upgraded Tails drive 3. Verify that the internet is working (Tor is bootstrapped, you can reach an external website over Tor) 4. Check out the `1.1.0~rc2` tag 5. Run `./securedrop-admin setup` in ~/Persistent/securedrop 6. Observe error ## Expected Behavior securedrop-admin should run and the dependencies should be installed. ## Actual Behavior securedrop-admin fails and returns an error; the dependencies are not installed. ## Comments Per https://github.com/freedomofpress/securedrop/pull/4852/files#diff-b5e536cc161fcc0d62e661b4d6eae381R70-R73 When running the commands locally, I get * `lsb_release --id --short` returns `Debian` * `uname -a` returns `Linux amnesia 5.3.0-trunk-amd64 #1 SMP Debian 5.3.2-1~exp1 (2019-10-02) x86_64 GNU/Linux` When I run ./securedrop-admin with no parameter, I get: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin help Could not find platform independent libraries <prefix> Could not find platform dependent libraries <exec_prefix> Consider setting $PYTHONHOME to <prefix>[:<exec_prefix>] Fatal Python error: Py_Initialize: Unable to get the locale encoding ImportError: No module named 'encodings' Current thread 0x00007cf687450740 (most recent call first): Aborted ```
Removing the existing admin virtualenv in `~/Persistent/securedrop/admin/.venv3` resolves the issue for me locally. It seems like Buster/Tails 4-specific dependencies are required to build a Buster-compatible venv. If the virtualenv was created in 3.x (Stretch), it doesn't work in 4.x (Buster). EDIT: Not quite; after running `./securedrop-admin setup`, invoking any other `securedrop-admin` command returns: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin -v Traceback (most recent call last): File "/home/amnesia/Persistent/securedrop/admin/.venv3/bin/securedrop-admin", line 7, in <module> exec(compile(f.read(), __file__, 'exec')) File "/home/amnesia/Persistent/securedrop/admin/bin/securedrop-admin", line 22, in <module> main(sys.argv[1:]) File "/home/amnesia/Persistent/securedrop/admin/securedrop_admin/__init__.py", line 1000, in main return_code = args.func(args) AttributeError: 'Namespace' object has no attribute 'func' ``` > It seems like Buster/Tails 4-specific dependencies are required to build a Buster-compatible venv. If the virtualenv was created in 3.x (Stretch), it doesn't work in 4.x (Buster). Yes, this is due to different Python versions: 3.5 in Stretch and 3.7 in Buster.
2019-10-16T15:59:48Z
[]
[]
freedomofpress/securedrop
4,916
freedomofpress__securedrop-4916
[ "4910" ]
b64b38bfe39fe7b097852eb708b72e55d93bea09
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -20,6 +20,7 @@ import argparse import logging import os +import shutil import subprocess import sys @@ -68,13 +69,48 @@ def is_tails(): id = None # dirty hack to unreliably detect Tails 4.0~beta2 - if id == 'Debian': + if id == b'Debian': if os.uname()[1] == 'amnesia': id = 'Tails' return id == 'Tails' +def clean_up_tails3_venv(virtualenv_dir=VENV_DIR): + """ + Tails 3.x, based on debian stretch uses libpython3.5, whereas Tails 4.x is + based on Debian Buster and uses libpython3.7. This means that the Tails 3.x + virtualenv will not work under Tails 4.x, and will need to be destroyed and + rebuilt. We can detect if the version of libpython is 3.5 in the + admin/.venv3/ folder, and delete it if that's the case. This will ensure a + smooth upgrade from Tails 3.x to Tails 4.x. + """ + if is_tails(): + try: + dist = subprocess.check_output('lsb_release --codename --short', + shell=True).strip() + except subprocess.CalledProcessError: + dist = None + + # tails4 is based on buster + if dist == b'buster': + python_lib_path = os.path.join(virtualenv_dir, "lib/python3.5") + if os.path.exists(os.path.join(python_lib_path)): + sdlog.info( + "Tails 3 Python 3 virtualenv detected. " + "Removing it." + ) + shutil.rmtree(virtualenv_dir) + sdlog.info("Tails 3 Python 3 virtualenv deleted.") + + +def checkenv(args): + clean_up_tails3_venv(VENV_DIR) + if not os.path.exists(os.path.join(VENV_DIR, "bin/activate")): + sdlog.error('Please run "securedrop-admin setup".') + sys.exit(1) + + def maybe_torify(): if is_tails(): return ['torify'] @@ -130,6 +166,9 @@ def envsetup(args): Ansible is available to the Admin on subsequent boots without requiring installation of packages again. """ + # clean up tails 3.x venv when migrating to tails 4.x + clean_up_tails3_venv(VENV_DIR) + # virtualenv doesnt exist? Install dependencies and create if not os.path.exists(VENV_DIR): @@ -209,18 +248,30 @@ def parse_argv(argv): help="Increase verbosity on output") parser.set_defaults(func=envsetup) + subparsers = parser.add_subparsers() + + envsetup_parser = subparsers.add_parser( + 'envsetup', + help='Set up the admin virtualenv.' + ) + envsetup_parser.set_defaults(func=envsetup) + + checkenv_parser = subparsers.add_parser( + 'checkenv', + help='Check that the admin virtualenv is properly set up.' + ) + checkenv_parser.set_defaults(func=checkenv) + return parser.parse_args(argv) if __name__ == "__main__": args = parse_argv(sys.argv[1:]) setup_logger(args.v) - if args.v: + + try: args.func(args) + except Exception: + sys.exit(1) else: - try: - args.func(args) - except Exception: - sys.exit(1) - else: - sys.exit(0) + sys.exit(0)
diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py --- a/admin/tests/test_securedrop-admin-setup.py +++ b/admin/tests/test_securedrop-admin-setup.py @@ -18,6 +18,8 @@ # import argparse +import mock +import os import pytest import subprocess @@ -74,3 +76,46 @@ def test_install_pip_dependencies_fail(self, caplog): assert 'Failed to install' in caplog.text assert 'in stdout' in caplog.text assert 'in stderr' in caplog.text + + def test_python3_stretch_venv_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(str(tmpdir), 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value=b"buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert 'Tails 3 Python 3 virtualenv detected.' in caplog.text + assert 'Tails 3 Python 3 virtualenv deleted.' in caplog.text + assert not os.path.exists(venv_path) + + def test_python3_buster_venv_not_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.7') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert ( + 'Tails 3 Python 3 virtualenv detected' not in caplog.text + ) + assert os.path.exists(venv_path) + + def test_python3_stretch_venv_not_deleted_in_stretch(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="stretch"): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path) + + def test_venv_cleanup_subprocess_exception(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError(1, + ':o')): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path)
securedrop-admin (setup) fails in Tails 4.0-rc1 ## Description Running `./securedrop-admin setup` in Tails 4.0-rc1 (upgraded from 3.16) returns the following error: ``` INFO: Virtualenv already exists, not creating INFO: Checking Python dependencies for securedrop-admin ERROR: Failed to install pip dependencies. Check network connection and try again. ``` This was done in VMs; will try to reproduce on hardware. ## Steps to Reproduce 1. Upgrade Tails device from 3.16 to 4.0 (boot into the 4.0-rc1 drive and clone to upgrade the 3.16 drive) 2. Boot into the newly upgraded Tails drive 3. Verify that the internet is working (Tor is bootstrapped, you can reach an external website over Tor) 4. Check out the `1.1.0~rc2` tag 5. Run `./securedrop-admin setup` in ~/Persistent/securedrop 6. Observe error ## Expected Behavior securedrop-admin should run and the dependencies should be installed. ## Actual Behavior securedrop-admin fails and returns an error; the dependencies are not installed. ## Comments Per https://github.com/freedomofpress/securedrop/pull/4852/files#diff-b5e536cc161fcc0d62e661b4d6eae381R70-R73 When running the commands locally, I get * `lsb_release --id --short` returns `Debian` * `uname -a` returns `Linux amnesia 5.3.0-trunk-amd64 #1 SMP Debian 5.3.2-1~exp1 (2019-10-02) x86_64 GNU/Linux` When I run ./securedrop-admin with no parameter, I get: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin help Could not find platform independent libraries <prefix> Could not find platform dependent libraries <exec_prefix> Consider setting $PYTHONHOME to <prefix>[:<exec_prefix>] Fatal Python error: Py_Initialize: Unable to get the locale encoding ImportError: No module named 'encodings' Current thread 0x00007cf687450740 (most recent call first): Aborted ```
Removing the existing admin virtualenv in `~/Persistent/securedrop/admin/.venv3` resolves the issue for me locally. It seems like Buster/Tails 4-specific dependencies are required to build a Buster-compatible venv. If the virtualenv was created in 3.x (Stretch), it doesn't work in 4.x (Buster). EDIT: Not quite; after running `./securedrop-admin setup`, invoking any other `securedrop-admin` command returns: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin -v Traceback (most recent call last): File "/home/amnesia/Persistent/securedrop/admin/.venv3/bin/securedrop-admin", line 7, in <module> exec(compile(f.read(), __file__, 'exec')) File "/home/amnesia/Persistent/securedrop/admin/bin/securedrop-admin", line 22, in <module> main(sys.argv[1:]) File "/home/amnesia/Persistent/securedrop/admin/securedrop_admin/__init__.py", line 1000, in main return_code = args.func(args) AttributeError: 'Namespace' object has no attribute 'func' ``` > It seems like Buster/Tails 4-specific dependencies are required to build a Buster-compatible venv. If the virtualenv was created in 3.x (Stretch), it doesn't work in 4.x (Buster). Yes, this is due to different Python versions: 3.5 in Stretch and 3.7 in Buster.
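To make the mechanism concrete, here is a minimal sketch of how a stale Tails 3 virtualenv can be detected: a virtualenv records the interpreter it was built with as a `lib/pythonX.Y` directory, so a venv containing `lib/python3.5` was built on Stretch and cannot be reused on Buster. The function names here are illustrative, not the actual tooling's API.

```python
import os
import shutil


def venv_python_versions(venv_dir):
    """List the lib/pythonX.Y directories recorded inside a virtualenv."""
    lib_dir = os.path.join(venv_dir, "lib")
    if not os.path.isdir(lib_dir):
        return []
    return [d for d in os.listdir(lib_dir) if d.startswith("python")]


def remove_stale_tails3_venv(venv_dir):
    """Delete the venv if it was built against Stretch's Python 3.5."""
    if "python3.5" in venv_python_versions(venv_dir):
        shutil.rmtree(venv_dir)


# e.g. remove_stale_tails3_venv(
#     os.path.expanduser("~/Persistent/securedrop/admin/.venv3"))
```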
2019-10-17T09:44:00Z
[]
[]
freedomofpress/securedrop
4,932
freedomofpress__securedrop-4932
[ "4929" ]
3494988e8bf203b1b8121536e6528fc244fbfbdd
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -154,7 +154,7 @@ def install_apt_dependencies(args): raise -def envsetup(args): +def envsetup(args, virtualenv_dir=VENV_DIR): """Installs Admin tooling required for managing SecureDrop. Specifically: * updates apt-cache @@ -167,10 +167,10 @@ def envsetup(args): installation of packages again. """ # clean up tails 3.x venv when migrating to tails 4.x - clean_up_tails3_venv(VENV_DIR) + clean_up_tails3_venv(virtualenv_dir) # virtualenv doesnt exist? Install dependencies and create - if not os.path.exists(VENV_DIR): + if not os.path.exists(virtualenv_dir): install_apt_dependencies(args) @@ -181,12 +181,18 @@ def envsetup(args): sdlog.info("Setting up virtualenv") try: sdlog.debug(subprocess.check_output( - maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR], + maybe_torify() + ['virtualenv', + '--python=python3', + virtualenv_dir + ], stderr=subprocess.STDOUT)) except subprocess.CalledProcessError as e: sdlog.debug(e.output) sdlog.error(("Unable to create virtualenv. Check network settings" " and try again.")) + sdlog.debug("Cleaning up virtualenv") + if os.path.exists(virtualenv_dir): + shutil.rmtree(virtualenv_dir) raise else: sdlog.info("Virtualenv already exists, not creating")
diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py --- a/admin/tests/test_securedrop-admin-setup.py +++ b/admin/tests/test_securedrop-admin-setup.py @@ -119,3 +119,19 @@ def test_venv_cleanup_subprocess_exception(self, tmpdir, caplog): ':o')): bootstrap.clean_up_tails3_venv(venv_path) assert os.path.exists(venv_path) + + def test_envsetup_cleanup(self, tmpdir, caplog): + venv = os.path.join(str(tmpdir), "empty_dir") + args = "" + with pytest.raises(subprocess.CalledProcessError): + with mock.patch('subprocess.check_output', + side_effect=self.side_effect_venv_bootstrap(venv)): + bootstrap.envsetup(args, venv) + assert not os.path.exists(venv) + assert 'Cleaning up virtualenv' in caplog.text + + def side_effect_venv_bootstrap(self, venv_path): + # emulate the venv being created, and raise exception to simulate + # failure in virtualenv creation + os.makedirs(venv_path) + raise subprocess.CalledProcessError(1, ':o')
Python3 admin virtualenv is broken if improperly set up (doesn't contain pip3) ## Description Initially discovered while reviewing https://github.com/freedomofpress/securedrop/pull/4927#issuecomment-543763957: if the admin virtualenv is improperly set up, one must manually remove `admin/.venv3` to fix the virtualenv. ## Steps to Reproduce This is very hard to reproduce, but you can do it by pressing Ctrl+C while the virtualenv is being set up as part of an initial `./securedrop-admin setup` run. Subsequent `./securedrop-admin setup` invocations will no longer work. ## Expected Behavior `./securedrop-admin setup` should not fail ## Actual Behavior `./securedrop-admin setup` fails with the following output: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin setup INFO: Virtualenv already exists, not creating INFO: Checking Python dependencies for securedrop-admin ERROR: Failed to install pip dependencies. Check network connection and try again. ```
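A sketch of the cleanup-on-failure idea, assuming a plain `virtualenv` invocation: if creation fails or is interrupted partway, the half-built directory is removed so the next run starts from scratch instead of seeing an "existing" venv that contains no usable pip.

```python
import os
import shutil
import subprocess


def create_venv(venv_dir):
    """Create a virtualenv; remove the half-built directory on failure.

    A partially created venv makes later runs skip re-creation because
    the directory already exists, even though pip was never installed
    into it.
    """
    try:
        subprocess.check_output(
            ["virtualenv", "--python=python3", venv_dir],
            stderr=subprocess.STDOUT,
        )
    except (subprocess.CalledProcessError, KeyboardInterrupt):
        if os.path.exists(venv_dir):
            shutil.rmtree(venv_dir)
        raise
```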
2019-10-18T17:51:14Z
[]
[]
freedomofpress/securedrop
4,933
freedomofpress__securedrop-4933
[ "4930" ]
3494988e8bf203b1b8121536e6528fc244fbfbdd
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -89,45 +89,6 @@ def validate(self, document): raise ValidationError( message="An IP address must be something like 10.240.20.83") - class ValidateDNS(Validator): - def validate(self): - raise Exception() # pragma: no cover - - def is_tails(self): - try: - id = subprocess.check_output('lsb_release --id --short', - shell=True).strip() - except subprocess.CalledProcessError: - id = None - return id == 'Tails' - - def lookup_fqdn(self, fqdn, dns=None): - cmd = 'host -W=10 -T -4 ' + fqdn - if self.is_tails(): - cmd = 'torify ' + cmd - cmd += ' ' + (dns and dns or '8.8.8.8') - try: - result = subprocess.check_output(cmd.split(' '), - stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - result = e.output - sdlog.debug(cmd + ' => ' + result) - return 'has address' in result - - class ValidateDNSServer(ValidateDNS): - def validate(self, document): - if self.lookup_fqdn('gnu.org', document.text): - return True - raise ValidationError( - message='Unable to resolve gnu.org using this DNS') - - class ValidateFQDN(ValidateDNS): - def validate(self, document): - if self.lookup_fqdn(document.text): - return True - raise ValidationError( - message='Unable to resolve ' + document.text) - class ValidatePath(Validator): def __init__(self, basedir): self.basedir = basedir
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py --- a/admin/tests/test_securedrop-admin.py +++ b/admin/tests/test_securedrop-admin.py @@ -445,63 +445,6 @@ def test_validate_optional_email(self): assert validator.validate(Document('[email protected]')) assert validator.validate(Document('')) - def test_is_tails(self): - validator = securedrop_admin.SiteConfig.ValidateDNS() - with mock.patch('subprocess.check_output', return_value='Tails'): - assert validator.is_tails() - with mock.patch('subprocess.check_output', return_value='Debian'): - assert validator.is_tails() is False - with mock.patch('subprocess.check_output', - side_effect=subprocess.CalledProcessError( - 1, 'cmd', 'BANG')): - assert validator.is_tails() is False - - def test_lookup_dns(self, caplog): - validator = securedrop_admin.SiteConfig.ValidateDNS() - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails', - return_value=True): - with mock.patch('subprocess.check_output', - return_value='has address') as check_output: - assert validator.lookup_fqdn('gnu.org', '8.8.8.8') - assert check_output.call_args[0][0][0] == 'torify' - assert check_output.call_args[0][0][6] == '8.8.8.8' - - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails', - return_value=False): - with mock.patch('subprocess.check_output', - return_value='failed') as check_output: - assert validator.lookup_fqdn('gnu.org') is False - assert not check_output.call_args[0][0][0] == 'torify' - assert 'failed' in caplog.text - - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails', - return_value=False): - with mock.patch('subprocess.check_output', - side_effect=subprocess.CalledProcessError( - 1, 'cmd', 'BANG')): - assert validator.lookup_fqdn('gnu.org') is False - assert 'BANG' in caplog.text - - def test_validate_dns_server(self, caplog): - validator = securedrop_admin.SiteConfig.ValidateDNSServer() - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn', - return_value=True): - assert validator.validate(Document('8.8.8.8')) - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn', - return_value=False): - with pytest.raises(ValidationError): - validator.validate(Document('8.8.8.8')) - - def test_lookup_fqdn(self, caplog): - validator = securedrop_admin.SiteConfig.ValidateFQDN() - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn', - return_value=True): - assert validator.validate(Document('gnu.org')) - with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn', - return_value=False): - with pytest.raises(ValidationError): - assert validator.validate(Document('gnu.org')) - def test_validate_user(self): validator = securedrop_admin.SiteConfig.ValidateUser() with pytest.raises(ValidationError):
dead (and non-functional) code in securedrop-admin ## Description `ValidateDNS`, `ValidateDNSServer`, `ValidateFQDN` appear to be unused in `securedrop_admin.__init__.py`. They also rely on logic that does not currently work with Python 3 (e.g. the `is_tails` method). We should remove these classes.
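The Python 3 incompatibility mentioned above is the classic bytes-versus-str pitfall, easy to demonstrate in isolation (`echo Tails` stands in for `lsb_release --id --short` so the snippet runs anywhere):

```python
import subprocess

# check_output() returns bytes on Python 3, so comparing its result to a
# str can never be true: the reason a Python-2-era `id == 'Tails'` check
# silently stopped matching.
dist = subprocess.check_output("echo Tails", shell=True).strip()

print(dist == "Tails")                   # False: bytes vs str
print(dist == b"Tails")                  # True: compare against bytes
print(dist.decode("utf-8") == "Tails")   # True: or decode first
```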
2019-10-18T18:55:21Z
[]
[]
freedomofpress/securedrop
4,935
freedomofpress__securedrop-4935
[ "4929" ]
51d4bb23c0fe4ad32d7d144e59ff7296fbfaba71
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -154,7 +154,7 @@ def install_apt_dependencies(args): raise -def envsetup(args): +def envsetup(args, virtualenv_dir=VENV_DIR): """Installs Admin tooling required for managing SecureDrop. Specifically: * updates apt-cache @@ -167,10 +167,10 @@ def envsetup(args): installation of packages again. """ # clean up tails 3.x venv when migrating to tails 4.x - clean_up_tails3_venv(VENV_DIR) + clean_up_tails3_venv(virtualenv_dir) # virtualenv doesnt exist? Install dependencies and create - if not os.path.exists(VENV_DIR): + if not os.path.exists(virtualenv_dir): install_apt_dependencies(args) @@ -181,12 +181,18 @@ def envsetup(args): sdlog.info("Setting up virtualenv") try: sdlog.debug(subprocess.check_output( - maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR], + maybe_torify() + ['virtualenv', + '--python=python3', + virtualenv_dir + ], stderr=subprocess.STDOUT)) except subprocess.CalledProcessError as e: sdlog.debug(e.output) sdlog.error(("Unable to create virtualenv. Check network settings" " and try again.")) + sdlog.debug("Cleaning up virtualenv") + if os.path.exists(virtualenv_dir): + shutil.rmtree(virtualenv_dir) raise else: sdlog.info("Virtualenv already exists, not creating")
diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py --- a/admin/tests/test_securedrop-admin-setup.py +++ b/admin/tests/test_securedrop-admin-setup.py @@ -119,3 +119,19 @@ def test_venv_cleanup_subprocess_exception(self, tmpdir, caplog): ':o')): bootstrap.clean_up_tails3_venv(venv_path) assert os.path.exists(venv_path) + + def test_envsetup_cleanup(self, tmpdir, caplog): + venv = os.path.join(str(tmpdir), "empty_dir") + args = "" + with pytest.raises(subprocess.CalledProcessError): + with mock.patch('subprocess.check_output', + side_effect=self.side_effect_venv_bootstrap(venv)): + bootstrap.envsetup(args, venv) + assert not os.path.exists(venv) + assert 'Cleaning up virtualenv' in caplog.text + + def side_effect_venv_bootstrap(self, venv_path): + # emulate the venv being created, and raise exception to simulate + # failure in virtualenv creation + os.makedirs(venv_path) + raise subprocess.CalledProcessError(1, ':o')
Python3 admin virtualenv is broken if improperly set up (doesn't contain pip3) ## Description Initially discovered while reviewing https://github.com/freedomofpress/securedrop/pull/4927#issuecomment-543763957: if the admin virtualenv is improperly set up, one must manually remove `admin/.venv3` to fix the virtualenv. ## Steps to Reproduce This is very hard to reproduce, but you can do it by pressing Ctrl+C while the virtualenv is being set up as part of an initial `./securedrop-admin setup` run. Subsequent `./securedrop-admin setup` invocations will no longer work. ## Expected Behavior `./securedrop-admin setup` should not fail ## Actual Behavior `./securedrop-admin setup` fails with the following output: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin setup INFO: Virtualenv already exists, not creating INFO: Checking Python dependencies for securedrop-admin ERROR: Failed to install pip dependencies. Check network connection and try again. ```
2019-10-18T19:37:51Z
[]
[]
freedomofpress/securedrop
5,046
freedomofpress__securedrop-5046
[ "5009" ]
70b598561bce683569d27f9885f0079e6a6f9472
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -59,6 +59,7 @@ def update_submission_preferences(): form = SubmissionPreferencesForm() if form.validate_on_submit(): # The UI prompt ("prevent") is the opposite of the setting ("allow"): + flash(gettext("Preferences saved."), "submission-preferences-success") value = not bool(request.form.get('prevent_document_uploads')) InstanceConfig.set('allow_document_uploads', value) return redirect(url_for('admin.manage_config'))
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -287,11 +287,21 @@ def _admin_disallows_document_uploads(self): self.safe_click_by_id("prevent_document_uploads") self.safe_click_by_id("submit-submission-preferences") + def preferences_saved(): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Preferences saved." in flash_msg.text + self.wait_for(preferences_saved, timeout=self.timeout * 6) + def _admin_allows_document_uploads(self): if self.driver.find_element_by_id("prevent_document_uploads").is_selected(): self.safe_click_by_id("prevent_document_uploads") self.safe_click_by_id("submit-submission-preferences") + def preferences_saved(): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Preferences saved." in flash_msg.text + self.wait_for(preferences_saved, timeout=self.timeout * 6) + def _add_user(self, username, first_name="", last_name="", is_admin=False, hotp=None): self.safe_send_keys_by_css_selector('input[name="username"]', username) diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1297,6 +1297,11 @@ def test_prevent_document_uploads(journalist_app, test_admin): data=form.data, follow_redirects=True) assert InstanceConfig.get_current().allow_document_uploads is False + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.update_submission_preferences'), + data=form.data, + follow_redirects=True) + ins.assert_message_flashed('Preferences saved.', 'submission-preferences-success') def test_no_prevent_document_uploads(journalist_app, test_admin): @@ -1306,6 +1311,10 @@ def test_no_prevent_document_uploads(journalist_app, test_admin): app.post(url_for('admin.update_submission_preferences'), follow_redirects=True) assert InstanceConfig.get_current().allow_document_uploads is True + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.update_submission_preferences'), + follow_redirects=True) + ins.assert_message_flashed('Preferences saved.', 'submission-preferences-success') def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin):
Add confirmation message after updating submission preferences #4879 made it possible to prevent sources from uploading documents by changing a setting in the *Admin Interface*: ![Screen Shot 2019-11-19 at 16 41 15-fullpage](https://user-images.githubusercontent.com/213636/69203093-51470f00-0af8-11ea-9394-2f6349bd646e.png) However, there is no confirmation message after changing this setting. At a minimum, we should add a generic flashed message that we could reuse as future preferences are added to this page, e.g., "Settings have been updated successfully." # User Story As an admin, I want to know at a glance that my actions on the preferences page have had the desired effect, so that I don't have to re-examine the state of specific checkboxes to be sure.
Hi, first contributor to securedrop here. I'd like to have a go at implementing this if possible! To me it seems like the most sensible approach would be to implement this entirely with client-side JavaScript, changing the behaviour of the form dynamically and including a popup or flash on successful submission. Any suggestions are appreciated. Hello @Petvdp, welcome!! :) I don't believe that any JavaScript is used in the Admin Interface today; most of SecureDrop avoids JavaScript dependencies, for security reasons. Technical contributors can speak more to implementation details. I'm a UX contributor, and am in the process of finalizing the updated styling for all flash messaging. I'll be happy to share those updated styles here sometime next week (or sooner, but am trying to be conservative, for my own sanity). We'd all love to see this page get pushed past the finish line with this issue being addressed, and I'd love to see the updated messaging style get implemented. Jen or Erik or others can speak more to timing and technical particulars. Anywhoo, glad to see you here! More later... Hi @ninavizz, thanks for the intro! I figured this would be a good opportunity to learn a few things about security-conscious programming while building something that's useful. Yeah, I can definitely see why you would want to avoid JavaScript wherever possible, and definitely only ever vanilla. It would also be bad to mess up the HTTP request somehow. There are potential server-side approaches to this feature, but I can't think of any that don't involve modifying application state in the page's GET request, which would violate REST, since the server would then have to know whether or not the client is reloading the page after making the request. Hi @petevdp, to resolve this issue, it should be sufficient to use the existing message flashing code and notification styling, and to add a flash upon a POST request to the `/update-submission-preferences` route. Check out example flashed messages in the `admin` routes (`securedrop/journalist_app/admin.py`). Okay, I'll give that a shot, thanks!
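A minimal sketch of that suggestion, with illustrative route paths and flash category (the real handler also marks the string for translation with `gettext`):

```python
from flask import Flask, flash, redirect, request, url_for

app = Flask(__name__)
app.secret_key = "dev-only-secret"  # flash() stores messages in the session


@app.route("/admin/config")
def manage_config():
    # Stand-in for the admin config template, which would render any
    # flashed messages via get_flashed_messages().
    return "config page"


@app.route("/admin/update-submission-preferences", methods=["POST"])
def update_submission_preferences():
    value = not bool(request.form.get("prevent_document_uploads"))
    # ... persist `value` ...
    flash("Preferences saved.", "submission-preferences-success")
    return redirect(url_for("manage_config"))
```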
2019-11-29T00:10:06Z
[]
[]
freedomofpress/securedrop
5,067
freedomofpress__securedrop-5067
[ "4878" ]
bb2f04aae68f1351f620006e3d64b6dce02f3228
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py --- a/journalist_gui/journalist_gui/SecureDropUpdater.py +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -7,6 +7,7 @@ import pexpect import socket import sys +import syslog as log from journalist_gui import updaterUI, strings, resources_rc # noqa @@ -35,9 +36,7 @@ def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # app.instance_binding.bind(IDENTIFIER) except OSError as e: if e.errno == ALREADY_BOUND_ERRNO: - err_dialog = QtWidgets.QMessageBox() - err_dialog.setText(name + strings.app_is_already_running) - err_dialog.exec() + log.syslog(log.LOG_NOTICE, name + strings.app_is_already_running) sys.exit() else: raise
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py --- a/journalist_gui/test_gui.py +++ b/journalist_gui/test_gui.py @@ -13,7 +13,7 @@ @mock.patch('journalist_gui.SecureDropUpdater.sys.exit') [email protected]('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') [email protected]('syslog.syslog') class TestSecondInstancePrevention(unittest.TestCase): def setUp(self): self.mock_app = mock.MagicMock()
Graphical updater "is already running" dialog during late stages of the update process ## Description The graphical updater briefly shows an "is already running" dialog during the last stages of the process. The dialog can be dismissed and has no negative consequences. Tested on Tails 3.16. ## Steps to Reproduce Perform a full graphical update using the graphical updater, with the Tails admin password set. ## Expected Behavior The graphical update completes without any unusual messages. ## Actual Behavior An "already running" dialog briefly appears towards the end of the process.
This happens because the network hook is now always triggered by tailsconfig, rather than only on a change in the network hook script (a consequence of v3 services). If it's a problem, probably the easiest thing to do would be to have the second copy of the SecureDrop updater just exit silently.
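For context, the single-instance guard works roughly like the sketch below: binding a Linux abstract-namespace socket acts as a process-lifetime lock, so on a second launch the bind fails and the duplicate can log to syslog and exit quietly instead of raising a dialog. The errno handling here is simplified relative to the real code.

```python
import errno
import socket
import sys
import syslog


def prevent_second_instance(name):
    """Exit quietly (logging to syslog) if another instance holds the lock.

    An abstract-namespace socket (name prefixed with a NUL byte) is
    released by the kernel when the first process exits, so no stale
    lock files are left behind.
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    try:
        sock.bind("\0" + name)
    except OSError as e:
        if e.errno == errno.EADDRINUSE:
            syslog.syslog(syslog.LOG_NOTICE, name + " is already running")
            sys.exit(0)
        raise
    return sock  # caller keeps a reference so the lock lives with the process
```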
2019-12-06T16:40:32Z
[]
[]
freedomofpress/securedrop
5,074
freedomofpress__securedrop-5074
[ "4757" ]
2300bdfa28dd0739d1e6c413de60fd805891913c
diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py --- a/securedrop/source_app/api.py +++ b/securedrop/source_app/api.py @@ -3,6 +3,8 @@ from flask import Blueprint, current_app, make_response +from source_app.utils import get_sourcev2_url, get_sourcev3_url + import version @@ -16,7 +18,9 @@ def metadata(): 'gpg_fpr': config.JOURNALIST_KEY, 'sd_version': version.__version__, 'server_os': platform.linux_distribution()[1], - 'supported_languages': config.SUPPORTED_LOCALES + 'supported_languages': config.SUPPORTED_LOCALES, + 'v2_source_url': get_sourcev2_url(), + 'v3_source_url': get_sourcev3_url() } resp = make_response(json.dumps(meta)) resp.headers['Content-Type'] = 'application/json' diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py --- a/securedrop/source_app/utils.py +++ b/securedrop/source_app/utils.py @@ -9,6 +9,7 @@ from threading import Thread import i18n +import re from crypto_util import CryptoException from models import Source @@ -112,3 +113,31 @@ def normalize_timestamps(filesystem_id): "Couldn't normalize submission " "timestamps (touch exited with %d)" % rc) + + +def check_url_file(path, regexp): + """ + Check that a file exists at the path given and contains a single line + matching the regexp. Used for checking the source interface address + files at /var/lib/securedrop/source_{v2,v3}_url. + """ + try: + f = open(path, "r") + contents = f.readline().strip() + f.close() + if re.match(regexp, contents): + return contents + else: + return None + except IOError: + return None + + +def get_sourcev2_url(): + return check_url_file("/var/lib/securedrop/source_v2_url", + r"^[a-z0-9]{16}\.onion$") + + +def get_sourcev3_url(): + return check_url_file("/var/lib/securedrop/source_v3_url", + r"^[a-z0-9]{56}\.onion$")
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -558,6 +558,32 @@ def test_metadata_route(config, source_app): assert resp.json.get('server_os') == '16.04' assert resp.json.get('supported_languages') ==\ config.SUPPORTED_LOCALES + assert resp.json.get('v2_source_url') is None + assert resp.json.get('v3_source_url') is None + + +def test_metadata_v2_url(config, source_app): + onion_test_url = "abcdabcdabcdabcd.onion" + with patch.object(source_app_api, "get_sourcev2_url") as mocked_v2_url: + mocked_v2_url.return_value = (onion_test_url) + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert resp.json.get('v2_source_url') == onion_test_url + assert resp.json.get('v3_source_url') is None + + +def test_metadata_v3_url(config, source_app): + onion_test_url = "abcdefghabcdefghabcdefghabcdefghabcdefghabcdefghabcdefgh.onion" + with patch.object(source_app_api, "get_sourcev3_url") as mocked_v3_url: + mocked_v3_url.return_value = (onion_test_url) + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert resp.json.get('v2_source_url') is None + assert resp.json.get('v3_source_url') == onion_test_url def test_login_with_overly_long_codename(source_app): diff --git a/securedrop/tests/test_source_utils.py b/securedrop/tests/test_source_utils.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_source_utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +import os + +from source_app.utils import check_url_file + + +def test_check_url_file(config): + + assert check_url_file("nosuchfile", "whatever") is None + + try: + def write_url_file(path, content): + url_file = open(path, "w") + url_file.write("{}\n".format(content)) + + url_path = "test_source_url" + + onion_test_url = "abcdabcdabcdabcd.onion" + write_url_file(url_path, onion_test_url) + assert check_url_file(url_path, r"^[a-z0-9]{16}\.onion$") == onion_test_url + + onion_test_url = "abcdefghabcdefghabcdefghabcdefghabcdefghabcdefghabcdefgh.onion" + write_url_file(url_path, onion_test_url) + assert check_url_file(url_path, r"^[a-z0-9]{56}\.onion$") == onion_test_url + + write_url_file(url_path, "NO.onion") + assert check_url_file(url_path, r"^[a-z0-9]{56}\.onion$") is None + finally: + if os.path.exists(url_path): + os.unlink(url_path)
Add v2 and v3 source interface addresses to metadata endpoint ## Description The Source Interface exposes a `/metadata` endpoint that includes information about an instance's OS and application version, supported languages, and submission key fingerprint, which is useful for Nagios monitoring purposes among other things. Adding the SI addresses to this endpoint would a) allow FPF to monitor v3 service uptake and update Nagios checks accordingly, and b) allow end users on the v2 version of the service to verify the correct v3 address for the service (as an alternative or supplement to automatic redirection via the Alt-Svc header). Potential downside: if an admin turns on v3 but doesn't want to advertise that they've done so, this could inadvertently expose the v3 address. ## User Research Evidence I have none, but folks seem to like the idea on Gitter. ## User Stories - As an FPF support team member, I'd like to have v3 service information available for monitoring purposes. - As a SecureDrop user, I'd like to be able to verify the correct v3 address corresponding to a v2 address.
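For illustration, here is roughly how a monitoring script might consume the two fields added by the patch above. The address is a placeholder, and a real request to a .onion service must be routed over Tor (e.g. via torify or Tor's SOCKS proxy).

```python
import json
import urllib.request

# Hypothetical instance address; substitute a real source interface URL.
url = "http://example.onion/metadata"

with urllib.request.urlopen(url) as resp:
    meta = json.load(resp)

# Per the patch, each field is null (None) when the corresponding
# service is not configured on the instance.
print(meta.get("v2_source_url"))
print(meta.get("v3_source_url"))
```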
2019-12-11T00:18:33Z
[]
[]
freedomofpress/securedrop
5,075
freedomofpress__securedrop-5075
[ "4458" ]
2300bdfa28dd0739d1e6c413de60fd805891913c
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -2,6 +2,7 @@ import os import io +from base64 import urlsafe_b64encode from datetime import datetime from flask import (Blueprint, render_template, flash, redirect, url_for, g, session, current_app, request, Markup, abort) @@ -37,9 +38,17 @@ def generate(): return redirect(url_for('.lookup')) codename = generate_unique_codename(config) - session['codename'] = codename + + # Generate a unique id for each browser tab and associate the codename with this id. + # This will allow retrieval of the codename displayed in the tab from which the source has + # clicked to proceed to /generate (ref. issue #4458) + tab_id = urlsafe_b64encode(os.urandom(64)).decode() + codenames = session.get('codenames', {}) + codenames[tab_id] = codename + session['codenames'] = codenames + session['new_user'] = True - return render_template('generate.html', codename=codename) + return render_template('generate.html', codename=codename, tab_id=tab_id) @view.route('/org-logo') def select_logo(): @@ -51,33 +60,43 @@ def select_logo(): @view.route('/create', methods=['POST']) def create(): - filesystem_id = current_app.crypto_util.hash_codename( - session['codename']) - - source = Source(filesystem_id, current_app.crypto_util.display_id()) - db.session.add(source) - try: - db.session.commit() - except IntegrityError as e: - db.session.rollback() - current_app.logger.error( - "Attempt to create a source with duplicate codename: %s" % - (e,)) - - # Issue 2386: don't log in on duplicates - del session['codename'] - - # Issue 4361: Delete 'logged_in' if it's in the session - try: - del session['logged_in'] - except KeyError: - pass - - abort(500) + if session.get('logged_in', False): + flash(gettext("You are already logged in. Please verify your codename below as it " + + "may differ from the one displayed on the previous page."), + 'notification') else: - os.mkdir(current_app.storage.path(filesystem_id)) + tab_id = request.form['tab_id'] + codename = session['codenames'][tab_id] + session['codename'] = codename + + del session['codenames'] + + filesystem_id = current_app.crypto_util.hash_codename(codename) + + source = Source(filesystem_id, current_app.crypto_util.display_id()) + db.session.add(source) + try: + db.session.commit() + except IntegrityError as e: + db.session.rollback() + current_app.logger.error( + "Attempt to create a source with duplicate codename: %s" % + (e,)) + + # Issue 2386: don't log in on duplicates + del session['codename'] + + # Issue 4361: Delete 'logged_in' if it's in the session + try: + del session['logged_in'] + except KeyError: + pass + + abort(500) + else: + os.mkdir(current_app.storage.path(filesystem_id)) - session['logged_in'] = True + session['logged_in'] = True return redirect(url_for('.lookup')) @view.route('/lookup', methods=('GET',))
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -40,6 +40,9 @@ def _source_clicks_submit_documents_on_homepage(self): # a diceware codename they can use for subsequent logins assert self._is_on_generate_page() + def _source_regenerates_codename(self): + self.safe_click_by_id("regenerate-submit") + def _source_chooses_to_submit_documents(self): self._source_clicks_submit_documents_on_homepage() @@ -48,7 +51,7 @@ def _source_chooses_to_submit_documents(self): assert len(codename.text) > 0 self.source_name = codename.text - def _source_shows_codename(self): + def _source_shows_codename(self, verify_source_name=True): content = self.driver.find_element_by_id("codename-hint-content") assert not content.is_displayed() @@ -57,7 +60,8 @@ def _source_shows_codename(self): self.wait_for(lambda: content.is_displayed()) assert content.is_displayed() content_content = self.driver.find_element_by_css_selector("#codename-hint-content p") - assert content_content.text == self.source_name + if verify_source_name: + assert content_content.text == self.source_name def _source_hides_codename(self): content = self.driver.find_element_by_id("codename-hint-content") @@ -223,3 +227,17 @@ def _source_sees_document_attachment_item(self): def _source_does_not_sees_document_attachment_item(self): with pytest.raises(NoSuchElementException): self.driver.find_element_by_class_name("attachment") + + def _source_sees_already_logged_in_in_other_tab_message(self): + notification = self.driver.find_element_by_css_selector(".notification") + + if not hasattr(self, "accepted_languages"): + expected_text = "You are already logged in." + assert expected_text in notification.text + + def _source_sees_redirect_already_logged_in_message(self): + notification = self.driver.find_element_by_css_selector(".notification") + + if not hasattr(self, "accepted_languages"): + expected_text = "You were redirected because you are already logged in." + assert expected_text in notification.text diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -29,3 +29,106 @@ def test_journalist_key_from_source_interface(self): data = data.decode('utf-8') assert "BEGIN PGP PUBLIC KEY BLOCK" in data + + +class TestDuplicateSourceInterface( + functional_test.FunctionalTest, + source_navigation_steps.SourceNavigationStepsMixin): + + def get_codename_generate(self): + return self.driver.find_element_by_css_selector("#codename").text + + def get_codename_lookup(self): + return self.driver.find_element_by_css_selector("#codename-hint-content p").text + + def test_duplicate_generate_pages(self): + # Test generation of multiple codenames in different browser tabs, ref. issue 4458. 
+ + # Generate a codename in Tab A + assert len(self.driver.window_handles) == 1 + tab_a = self.driver.current_window_handle + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_a = self.get_codename_generate() + + # Generate a different codename in Tab B + self.driver.execute_script("window.open()") + tab_b = self.driver.window_handles[1] + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_b = self.get_codename_generate() + + assert tab_a != tab_b + assert codename_a != codename_b + + # Proceed to submit documents in Tab A + self.driver.switch_to.window(tab_a) + assert self.driver.current_window_handle == tab_a + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_shows_codename(verify_source_name=False) + codename_lookup_a = self.get_codename_lookup() + assert codename_lookup_a == codename_a + self._source_submits_a_message() + + # Proceed to submit documents in Tab B + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_sees_already_logged_in_in_other_tab_message() + self._source_shows_codename(verify_source_name=False) + codename_lookup_b = self.get_codename_lookup() + # We expect the codename to be the one from Tab A + assert codename_lookup_b == codename_a + self._source_submits_a_message() + + def test_duplicate_generate_pages_with_refresh(self): + # Test generation of multiple codenames in different browser tabs, including behavior + # of refreshing the codemae in each tab. Ref. issue 4458. + + # Generate a codename in Tab A + assert len(self.driver.window_handles) == 1 + tab_a = self.driver.current_window_handle + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_a1 = self.get_codename_generate() + # Regenerate codename in Tab A + self._source_regenerates_codename() + codename_a2 = self.get_codename_generate() + assert codename_a1 != codename_a2 + + # Generate a different codename in Tab B + self.driver.execute_script("window.open()") + tab_b = self.driver.window_handles[1] + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_b = self.get_codename_generate() + assert codename_b != codename_a1 != codename_a2 + + # Proceed to submit documents in Tab A + self.driver.switch_to.window(tab_a) + assert self.driver.current_window_handle == tab_a + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_shows_codename(verify_source_name=False) + codename_lookup_a = self.get_codename_lookup() + assert codename_lookup_a == codename_a2 + self._source_submits_a_message() + + # Regenerate codename in Tab B + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_regenerates_codename() + # We expect the source to be directed to /lookup with a flash message + assert self._is_on_lookup_page() + self._source_sees_redirect_already_logged_in_message() + # Check codename + self._source_shows_codename(verify_source_name=False) + codename_lookup_b = self.get_codename_lookup() + assert codename_lookup_b == codename_a2 + self._source_submits_a_message() diff --git a/securedrop/tests/test_integration.py 
b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -56,7 +56,8 @@ def test_submit_message(source_app, journalist_app, test_journo): with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) filesystem_id = g.filesystem_id # redirected to submission form resp = app.post('/submit', data=dict( @@ -153,7 +154,8 @@ def test_submit_file(source_app, journalist_app, test_journo): with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) filesystem_id = g.filesystem_id # redirected to submission form resp = app.post('/submit', data=dict( @@ -254,7 +256,8 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) codename = session['codename'] filesystem_id = g.filesystem_id # redirected to submission form @@ -474,7 +477,8 @@ def test_delete_collection(mocker, source_app, journalist_app, test_journo): # first, add a source with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) resp = app.post('/submit', data=dict( msg="This is a test.", fh=(BytesIO(b''), ''), @@ -523,7 +527,8 @@ def test_delete_collections(mocker, journalist_app, source_app, test_journo): num_sources = 2 for i in range(num_sources): app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) app.post('/submit', data=dict( msg="This is a test " + str(i) + ".", fh=(BytesIO(b''), ''), @@ -577,7 +582,8 @@ def test_filenames(source_app, journalist_app, test_journo): # add a source and submit stuff with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) _helper_filenames_submit(app) # navigate to the collection page @@ -603,7 +609,8 @@ def test_filenames_delete(journalist_app, source_app, test_journo): # add a source and submit stuff with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) _helper_filenames_submit(app) # navigate to the collection page @@ -714,7 +721,8 @@ def test_prevent_document_uploads(source_app, journalist_app, test_admin): # Check that the source interface accepts only messages: with source_app.test_client() as app: app.get('/generate') - resp = app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') @@ -739,7 +747,8 @@ def test_no_prevent_document_uploads(source_app, journalist_app, test_admin): # Check that the source interface accepts both files and messages: with source_app.test_client() as app: app.get('/generate') - resp = app.post('/create', 
follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -83,7 +83,7 @@ def test_generate(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 - session_codename = session['codename'] + session_codename = next(iter(session['codenames'].values())) text = resp.data.decode('utf-8') assert "This codename is what you will use in future visits" in text @@ -113,11 +113,13 @@ def test_create_new_source(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 - resp = app.post(url_for('main.create'), follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) assert session['logged_in'] is True # should be redirected to /lookup text = resp.data.decode('utf-8') assert "Submit Files" in text + assert 'codenames' not in session def test_generate_too_long_codename(source_app): @@ -143,17 +145,18 @@ def test_create_duplicate_codename_logged_in_not_in_session(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) # Create a source the first time - resp = app.post(url_for('main.create'), follow_redirects=True) + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) assert resp.status_code == 200 codename = session['codename'] with source_app.test_client() as app: # Attempt to add the same source with app.session_transaction() as sess: - sess['codename'] = codename - resp = app.post(url_for('main.create'), follow_redirects=True) + sess['codenames'] = {tab_id: codename} + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) logger.assert_called_once() assert ("Attempt to create a source with duplicate codename" in logger.call_args[0][0]) @@ -163,26 +166,31 @@ def test_create_duplicate_codename_logged_in_not_in_session(source_app): def test_create_duplicate_codename_logged_in_in_session(source_app): - with patch.object(source.app.logger, 'error') as logger: - with source_app.test_client() as app: - resp = app.get(url_for('main.generate')) - assert resp.status_code == 200 - - # Create a source the first time - resp = app.post(url_for('main.create'), follow_redirects=True) - assert resp.status_code == 200 + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) - # Attempt to add the same source - resp = app.post(url_for('main.create'), follow_redirects=True) - logger.assert_called_once() - assert ("Attempt to create a source with duplicate codename" - in logger.call_args[0][0]) - assert resp.status_code == 500 - assert 'codename' not in session + # Create a source the first time + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) + assert resp.status_code == 200 + codename = session['codename'] + logged_in = session['logged_in'] - # Reproducer for bug #4361 - resp = app.post(url_for('main.index'), follow_redirects=True) - assert 
'logged_in' not in session + # Attempt to add another source in the same session + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) + with app.session_transaction() as sess: + sess['codename'] = codename + sess['logged_in'] = logged_in + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) + assert resp.status_code == 200 + assert session['codename'] == codename + text = resp.data.decode('utf-8') + assert "You are already logged in." in text + assert "Submit Files" in text def test_lookup(source_app): diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -173,6 +173,6 @@ def new_codename(client, session): """Helper function to go through the "generate codename" flow. """ client.get('/generate') - codename = session['codename'] - client.post('/create') + tab_id, codename = next(iter(session['codenames'].items())) + client.post('/create', data={'tab_id': tab_id}) return codename
Duplicate Source Interface session triggers server error

## Description

Initiating more than one SecureDrop session in the Source Interface at a time causes a Server Error for the second session, and causes the first session to be invalidated.

## Steps to Reproduce

1. Launch the Docker-based SecureDrop development environment.
2. Visit http://localhost:8080/generate twice, in two separate private browser tabs.
3. Click "Submit" in the first tab.
4. Click "Submit" in the second tab.

## Expected Behavior

I can submit messages or documents.

## Actual Behavior

In the second tab, the following error message appears:

> Server error
> Sorry, the website encountered an error and was unable to complete your request.

In the first tab, subsequent page loads take me to the login screen: http://localhost:8080/login
It's not entirely clear to me what the correct behavior in this case _should_ be. Intuitively, I think it should invalidate the older session, in the unlikely event that it was initiated by a different user, and start a new session.

I've started investigating this issue. My understanding so far is that, despite using two private browser windows or tabs, both share the same session, and the [`session['codename']`](https://github.com/freedomofpress/securedrop/blob/62f7d224217add9959ef0708b00ab2b3ccd810f4/securedrop/source_app/main.py#L40) variable gets the value of the last codename generated (whether through opening a new window or tab, or by clicking the button to generate a new codename in any of the open windows or tabs).

For example, if I perform the following steps in order:

1. Click "Get Started" in *window_1*,
2. Click "Get Started" in *window_2*,
3. Click "Submit Documents" in *window_1*,
4. Click "Show" (codename) in *window_1*

*window_1* shows the codename that is displayed in *window_2*. I can then proceed with submitting a document in *window_1*; however, if I noted the codename shown in *window_1* at step 1 above, I will not be able to log back in using that codename, since the actual codename that was in the `session['codename']` variable when the new Source entry was generated is the one generated in *window_2* (generated after the one in *window_1*).

The error (status code 500) that occurs following the steps in "Steps to Reproduce" above happens because the codename from *window_2* already exists when the [`/create`](https://github.com/freedomofpress/securedrop/blob/62f7d224217add9959ef0708b00ab2b3ccd810f4/securedrop/source_app/main.py#L52) route is executed from *window_2*, and we try to add a new Source with a codename that already exists (it was added when "Submit Documents" was clicked in *window_1*).

Having the possibility of generating multiple codenames in multiple windows before the source logs in presents a significant risk that the source keeps note of a different codename than the one actually used to create the Source database entry. I think the safest way to prevent this is to only allow one `/generate` window until the user has logged in by clicking the "Submit Documents" button. To achieve this, the `/generate` route could check for the case where `session['codename']` exists but the user is not logged in. In this case, the user would be redirected to the index with an appropriate flash message. Any comments / issues with this proposal?

nice investigation

> presents a significant risk that the source keeps note of a different codename than the one actually used to create the Source database entry.

indeed, unless they pay special attention to the part of the `/lookup` page that has their codename for reference (available on the first login only, and they might not look at it) they might find themselves unable to log in again.

> the `/generate` route could check for the case where `session['codename']` exists but the user is not logged in. In this case, the user would be redirected to the index with an appropriate flash message.

this sounds like a reasonable approach!
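The fix that ultimately landed (visible in the test changes above) goes further than a redirect: it keys each pending codename by a per-tab identifier, so parallel `/generate` tabs no longer clobber one another. A minimal sketch of that pattern, assuming a plain Flask app with illustrative route bodies and a stand-in codename generator (not the exact SecureDrop implementation):

```python
from uuid import uuid4

from flask import Flask, abort, redirect, request, session

app = Flask(__name__)
app.secret_key = "dev-only-secret"  # assumption: configured securely in a real app


def generate_codename() -> str:
    # Stand-in for SecureDrop's Diceware-style codename generator.
    return uuid4().hex[:16]


@app.route("/generate")
def generate():
    # Each browser tab gets its own entry, so two open /generate pages
    # no longer overwrite each other's codename.
    tab_id = uuid4().hex
    codenames = session.get("codenames", {})
    codenames[tab_id] = generate_codename()
    session["codenames"] = codenames
    return "tab_id={} codename={}".format(tab_id, codenames[tab_id])


@app.route("/create", methods=["POST"])
def create():
    # The form posts back the tab_id it was rendered with, selecting the
    # codename that was actually shown to the user in that tab.
    tab_id = request.form.get("tab_id")
    codenames = session.get("codenames", {})
    if tab_id not in codenames:
        abort(400)
    session["codename"] = codenames[tab_id]  # the codename actually registered
    session["logged_in"] = True
    # Discard the other tabs' pending codenames, so the user cannot later
    # try to log in with a codename that was never registered.
    session.pop("codenames", None)
    return redirect("/lookup")
```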
2019-12-14T18:31:51Z
[]
[]
freedomofpress/securedrop
5116
freedomofpress__securedrop-5116
[ "4952" ]
1e84901f8cb97087db8b1ee6bb39d5231fab36e2
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -296,8 +296,12 @@ def login(): @view.route('/logout') def logout(): + """ + If a user is logged in, show them a logout page that prompts them to + click the New Identity button in Tor Browser to complete their session. + Otherwise redirect to the main Source Interface page. + """ if logged_in(): - msg = render_template('logout_flashed_message.html') # Clear the session after we render the message so it's localized # If a user specified a locale, save it and restore it @@ -305,7 +309,8 @@ def logout(): session.clear() session['locale'] = user_locale - flash(Markup(msg), "important hide-if-not-tor-browser") - return redirect(url_for('.index')) + return render_template('logout.html') + else: + return redirect(url_for('.index')) return view
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -19,6 +19,9 @@ def _is_on_lookup_page(self): def _is_on_generate_page(self): return self.wait_for(lambda: self.driver.find_element_by_id("create-form")) + def _is_on_logout_page(self): + return self.wait_for(lambda: self.driver.find_element_by_id("click-new-identity-tor")) + def _source_visits_source_homepage(self): self.driver.get(self.source_location) assert self._is_on_source_homepage() @@ -195,7 +198,7 @@ def reply_deleted(): def _source_logs_out(self): self.safe_click_by_id("logout") - self.wait_for(lambda: ("Submit for the first time" in self.driver.page_source)) + assert self._is_on_logout_page() def _source_not_found(self): self.driver.get(self.source_location + "/unlikely") @@ -218,7 +221,7 @@ def _source_sees_session_timeout_message(self): notification = self.driver.find_element_by_css_selector(".important") if not hasattr(self, "accept_languages"): - expected_text = "Your session timed out due to inactivity." + expected_text = "You were logged out due to inactivity." assert expected_text in notification.text def _source_sees_document_attachment_item(self): diff --git a/securedrop/tests/pageslayout/test_source.py b/securedrop/tests/pageslayout/test_source.py --- a/securedrop/tests/pageslayout/test_source.py +++ b/securedrop/tests/pageslayout/test_source.py @@ -153,11 +153,11 @@ def test_index(self): self._source_visits_source_homepage() self._screenshot('source-index.png') - def test_logout_flashed_message(self): + def test_logout(self): self.disable_js_torbrowser_driver() self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() self._source_submits_a_file() self._source_logs_out() - self._screenshot('source-logout_flashed_message.png') + self._screenshot('source-logout_page.png') diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -251,7 +251,10 @@ def test_login_and_logout(source_app): assert 'logged_in' not in session assert 'codename' not in session text = resp.data.decode('utf-8') - assert 'Thank you for exiting your session!' 
in text + + # This is part of the logout page message instructing users + # to click the 'New Identity' icon + assert 'This will clear your Tor browser activity data' in text def test_user_must_log_in_for_protected_views(source_app): @@ -706,7 +709,7 @@ def test_source_session_expiration(config, source_app): assert not session text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_source_session_expiration_create(config, source_app): @@ -731,7 +734,7 @@ def test_source_session_expiration_create(config, source_app): assert not session text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_csrf_error_page(config, source_app): @@ -743,7 +746,7 @@ def test_csrf_error_page(config, source_app): resp = app.post(url_for('main.create'), follow_redirects=True) text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_source_can_only_delete_own_replies(source_app):
Update UI/docs consistent with "New Identity" behavior change in Tor Browser 9.0

The "New Identity" behavior has changed once again in Tor Browser 9.0. There's now a dedicated button for it:

![New identity button](https://blog.torproject.org/sites/default/files/inline-images/toolbar%20update.png)

So we have to update these instructions:

> Thank you for exiting your session! Please select "New Identity" from the onion button in the Tor browser's toolbar to clear all history of your SecureDrop usage from this device.

And also the relevant portion in the [source guide](https://docs.securedrop.org/en/release-1.1.0/source.html).
For this upcoming release, let's just update the text and the icon as follows:

> **Sweep Activity** Click the “New Identity” `<x>` button in your Tor Browser’s toolbar before you continue.

[Icon art](https://github.com/freedomofpress/securedrop-ux/blob/master/UI%20Assets/torbroom-black.png)

`<x>` should be 16x16 in the UI; art was rendered at 400%.

For the following release, we should update all flash messaging in the Source UI per discussion in the 31 Oct UX meeting. Discussion about those changes should happen in #4967

I would propose the following iteration of your wording:

> **Clear Your Browser Activity:** Click the "New Identity" `<x>` button in your Tor Browser's toolbar before you do anything else.

Reasoning:

1) While I understand the motivation to use "Sweep" to connect the language to the icon, given that this language is not used in Tor Browser itself, I think we should default to a more commonly used (and easier to translate) verb. As far as I can tell, similar functionality is typically called "Clear <x>" in applications, e.g. "Clear browser history". If the Tor team is planning to use "Sweep" then that would change my view.
2) I would go with the longer "Clear Your Browser Activity" to make it obvious that it is a request to the user to do something.
3) "Before you continue" is a bit confusing IMO, because this message is shown after logout. "Continue what? I just logged out!"

1. Tor team doesn't know; Nina/Antonella pow-wow, forthcoming.
2. The possessive introduces a cognitive distraction, but "Clear Browser Activity" is direct and concise. Thank you for the suggestion! :)
3. Ok, touché... so: the user just logged out, but now we want them to "do this." Why? I don't disagree with your broader flag here, but I don't feel we have an adequate answer between either of us. Could we code into the SUI a trigger to have it automagically do the "New Identity" thing, or would that require a forbidden script?

@eloquence ...how do you feel about `moving on`? It acknowledges the user's decision to leave SD, while more clearly framing the ask imho, but I have no idea how it'll translate (or if I'm the only one who likes it in English)...

![image](https://user-images.githubusercontent.com/8262612/68171424-8f550800-ff28-11e9-8775-22219e383b64.png)

"Moving on" is more idiomatic; we'd need to rely on documentation for translators to ensure consistent translation across languages. In terms of how it feels in English, I have a _weak_ preference for "before you do anything else" in the original proposal, primarily because I feel it leaves no ambiguity: it doesn't matter whether your next action is to

- continue browsing in Tor
- close the browser window
- leave the Internet cafe you're in
- switch to another application on the same computer
- etc.

In contrast, less specific phrases like "moving on" or "continue" seem to leave more room for interpretation. Move on where or to what? Continue to do what?

My primary objection to "before you do anything else" is that it's wordy. It feels like a mouthful of Cheerios all about to bumble out (complete with milk dribble on chin)... on an aside, from the general word count of the message already being beyond excessive. The CTA is 3 words, and it's close to wrapping to a second line. So... that's what's informing my own biases. Anyone else have thoughts? @redshiftzero?

What about "Clear Browser Activity: (Now | Next | before doing anything else), click the "New Identity" <broom> button in your Tor Browser toolbar."?
@ninavizz, what would you think about having a dedicated **Logged Out** screen, as opposed to a flash message on the homepage? Like, the flow would be:

- user clicks **Log Out**
- user is logged out and sent to a screen where the sole message/action is "you're not done, click that broom", with no links back to the homepage (except for the default logo one)
- user presumably clicks broom.

I'd LOVE that... for many reasons, namely everything y'all say above—AND THE opportunity to remind sources to return to check for replies or to re-submit under the same codename. Also a good place for some generic opsec reminders (cover yr tracks, don't tweet at the org you just leaked to, etc).

I've never liked the semiotics in "Ohey, I logged out—but now there's an urgent-y alert?!" If it's a whole new page, the urgent-y text doesn't look like an erroneous action against the last page, whereas the current semiotics allude to that. The re-wording @ro proposed, that @eloquence and I have been stuck on, largely revolves around that.

Yes, I like the "whole new page" proposal, if we can keep the design super simple. If the only action we want the user to take is to click one button, then having that be the only message on the screen makes sense.

I'd like to propose an elegant design with a VERY explicit/clear hierarchy that prioritizes:

1. Sweep ask
2. Check back soon ask
3. (whitespace)
4. (elegant minimalism)
5. OpSec tips for next steps

Sounds good from my end. If 5. turns into a rabbit hole we can implement a simpler version for now. Standard disclaimer: elegance objectives may not be fully achievable. :P

Golden ratio whitespace: :white_check_mark:

Quick followup q: If a user's session expires, currently they are redirected to the main Source Interface page. That behaviour will remain the same even if we have a new 'post-logout' page that instructs the user to create a new Tor identity, correct? (As in, expired sessions should _not_ also redirect to the 'a new broom sweeps clean' page?)
2020-02-05T17:05:00Z
[]
[]
freedomofpress/securedrop
5169
freedomofpress__securedrop-5169
[ "3642" ]
fd3bf719069412aa7a50c8ea6db875deb7b2b20c
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py --- a/journalist_gui/journalist_gui/SecureDropUpdater.py +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -128,12 +128,13 @@ def run(self): tailsconfig_command = ("/home/amnesia/Persistent/" "securedrop/securedrop-admin " "tailsconfig") + self.failure_reason = "" try: child = pexpect.spawn(tailsconfig_command) child.expect('SUDO password:') self.output += child.before.decode('utf-8') child.sendline(self.sudo_password) - child.expect(pexpect.EOF) + child.expect(pexpect.EOF, timeout=120) self.output += child.before.decode('utf-8') child.close() @@ -141,13 +142,15 @@ def run(self): # failures in the Ansible output. if child.exitstatus: self.update_success = False - self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa + if "[sudo via ansible" in self.output: + self.failure_reason = strings.tailsconfig_failed_sudo_password + else: + self.failure_reason = strings.tailsconfig_failed_generic_reason else: self.update_success = True except pexpect.exceptions.TIMEOUT: self.update_success = False - self.failure_reason = strings.tailsconfig_failed_sudo_password - + self.failure_reason = strings.tailsconfig_failed_timeout except subprocess.CalledProcessError: self.update_success = False self.failure_reason = strings.tailsconfig_failed_generic_reason diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py --- a/journalist_gui/journalist_gui/strings.py +++ b/journalist_gui/journalist_gui/strings.py @@ -25,12 +25,15 @@ "Contact your SecureDrop administrator " "or [email protected] immediately.") tailsconfig_failed_sudo_password = ('Administrator password incorrect. ' - 'Exiting upgrade - ' - 'click Update Now to try again.') + 'Click Update Now to try again.') tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. " "Contact your administrator. " "If you are an administrator, contact " "[email protected].") +tailsconfig_failed_timeout = ("Tails workstation configuration took too long. " + "Contact your administrator. " + "If you are an administrator, contact " + "[email protected].") install_update_button = 'Update Now' install_later_button = 'Update Later' sudo_password_text = ("Enter the Tails Administrator password you "
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py --- a/journalist_gui/test_gui.py +++ b/journalist_gui/test_gui.py @@ -204,6 +204,18 @@ def test_tailsconfigThread_generic_failure(self, pt): @mock.patch('pexpect.spawn') def test_tailsconfigThread_sudo_password_is_wrong(self, pt): + child = pt() + before = MagicMock() + before.decode.return_value = "stuff[sudo via ansible, key=blahblahblah" + child.before = before + self.window.tails_thread.run() + self.assertNotIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.tailsconfig_failed_sudo_password) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_timeout(self, pt): child = pt() before = MagicMock() before.decode.side_effect = ["some data", @@ -213,7 +225,7 @@ def test_tailsconfigThread_sudo_password_is_wrong(self, pt): self.assertNotIn("failed=0", self.window.output) self.assertEqual(self.window.update_success, False) self.assertEqual(self.window.failure_reason, - strings.tailsconfig_failed_sudo_password) + strings.tailsconfig_failed_timeout) @mock.patch('pexpect.spawn') def test_tailsconfigThread_some_other_subprocess_error(self, pt):
SecureDrop Updater Tails Admin Password Prompt Appears to Fail When Clicking OK

## Description

When using the SecureDrop updater GUI tool to update SecureDrop, correctly entering the Tails admin password and clicking OK with a trackpad fails and invokes an "Administrator password is incorrect" error, while hitting the Enter key instead of clicking OK works.

## Steps to Reproduce

After a successful upgrade to SecureDrop 0.7.0, the SecureDrop Updater appeared to inform me I can upgrade to 0.8.0. Once I began that upgrade, I was prompted for the Tails admin password, which I entered and used the journalist workstation's built-in trackpad to click the OK button on the prompt, after which it failed and I had to click Update Now again to restart the upgrade process, only to have it fail the same way a few times. On the final try, I used the Enter key instead of clicking OK and it accepted the admin password and completed the upgrade.

## Expected Behavior

Clicking OK after entering a correct Tails admin password should accept the password and continue the upgrade.

## Actual Behavior

Clicking OK after entering a correct Tails admin password fails and tells me the password is wrong.

## Comments

This was on a Tails laptop that had only just been upgraded from 0.6.0 to 0.7.0 via the command line. Once the 0.7.0 upgrade was complete, the more modern SecureDrop updater prompted me to go to the shiny chrome future that is 0.8.0 and I very much wanted to.
thanks for filing this @huertanix! unfortunately I was not able to reproduce this in Tails 3.8, can you confirm that was the version of Tails you used? And can you provide the error message, i.e. what happened when it failed? If you are able to provide it, the full output of the details tab in the GUI is very helpful since that output is very verbose (please be sure to grep through the output and remove any onion addresses or HidServAuth cookies) I could not reproduce it for `0.7.0` on Tails 3.6. closing this, but let us know if you have more information or see it again! I ran into this again but this time I was already at 0.8.0 (just cloned the repo today but the GUI updater popped up anyway after running `./securedrop-admin tailsconfig`) for a journalist (not admin) Tails drive. I had a very easy and specific Tails admin password and it would fail every time I tried it, whether with a mouse click or enter key this time. Here's the tab output: > When the update begins, this area will populate with output. > Fetching and verifying latest update... (5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: Update needed > INFO: Verifying signature on latest update... > gpg: key 0x310F561200F4AD77: public key "SecureDrop Release Signing Key" imported > gpg: no ultimately trusted keys found > gpg: Total number processed: 1 > gpg: imported: 1 > INFO: Signature verification successful. > Note: checking out '0.8.0'. > > You are in 'detached HEAD' state. You can look around, make experimental > changes and commit them, and you can discard any commits you make in this > state without impacting any branches by performing another checkout. > > If you want to create a new branch to retain commits you create, you may > do so (now or later) by using -b with the checkout command again. Example: > > git checkout -b <new-branch-name> > > HEAD is now at e2c2f07d... SecureDrop 0.8.0 > INFO: Updated to SecureDrop 0.8.0. > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin upgraded > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... (1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. > Fetching and verifying latest update... (5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: All updates applied > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin are up-to-date > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... (1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. > Fetching and verifying latest update... 
(5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: All updates applied > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin are up-to-date > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... (1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. > Fetching and verifying latest update... (5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: All updates applied > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin are up-to-date > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... (1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. > Fetching and verifying latest update... (5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: All updates applied > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin are up-to-date > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... 
(1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. > Fetching and verifying latest update... (5 mins remaining) > INFO: Applying SecureDrop updates... > INFO: Checking for SecureDrop updates... > Fetching origin > INFO: All updates applied > Checking dependencies are up to date... (2 mins remaining) > INFO: Virtualenv already exists, not creating > INFO: Checking Python dependencies for securedrop-admin > INFO: Python dependencies for securedrop-admin are up-to-date > INFO: Finished installing SecureDrop dependencies > Configuring local Tails environment... (1 min remaining) > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > INFO: Configuring Tails workstation environment > INFO: You'll be prompted for the temporary Tails admin password, which was set on Tails login screen > Administrator password incorrect. Exiting upgrade - click Update Now to try again. I still can not reproduce it :( I installed fresh Tails 3.8, and there I moved to `develop` and then did tailsconfig first. I moved to `0.7.0` and then disconnected and connected back the network, that fired up the updater, and typed the sudo password manually. If I am on `0.8.0`, I could not manage to get the application up automatically. Reopening for further investigation based on @huertanix 's report. We should see if we can get a repro or root cause on this ahead of the next release. Making a note to set aside some time for additional investigation in the next sprint. Sadly, we're having to bump this again. On the upside, we've not seen any reports in support about this issue, but we should still make one more attempt to repro before the next release. Cannot reproduce on current Tails (3.10.1). Downgraded to 0.7.0, bumped the network connection, the updater appears. Clicked *Update Now*, the admin password comes up. Put in admin password, clicked *OK*. Update completes without error to 0.9.1. Bumped network again, updater appears, repeat update process, clicking *OK* instead of hitting Return. 
Update completes without error to 0.10.0. Repeated the above 3 times. No errors.

Given multiple unsuccessful repro attempts, no other reports, and no obvious potential causes in the code itself, we'll have to close this for now. @huertanix, thanks for reporting -- next time you encounter this issue, if you can get a clean repro, let's sync up in real time to see if we can get to the bottom of this.

During QA for 1.2.1 today I encountered this issue: despite my very carefully typing in the Tails admin password, the graphical updater told me it was incorrect. Confirmed I was using the correct password by running `sudo echo hello` in the terminal, no problems.

Got a report of this from a production user, so bumping in priority.

Saw this myself today, with the first attempt failing (though the password was correct) and the second attempt working. Poked a bit at the Qt input, but in isolation I don't see any issues, at least in my Ubuntu dev env -- e.g., it ignores control characters and only outputs what I input. My hypothesis, FWIW, is that the logic for setting the password via a child process sometimes doesn't work (some timing issue).
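The fix that eventually shipped (the `journalist_gui` patch above) addresses exactly this hypothesis: it raises the pexpect timeout so that slow Ansible runs are no longer misreported as authentication failures, and it inspects the captured output for Ansible's sudo marker to tell a genuinely wrong password apart from other errors. Restated as a standalone sketch (the command path and reason strings are illustrative):

```python
import pexpect


def run_tailsconfig(sudo_password, command="./securedrop-admin tailsconfig"):
    """Run an Ansible-backed command that prompts for a sudo password.

    Returns (success, reason). A sketch of the pattern used in the patch
    above, not the exact journalist_gui code.
    """
    output = ""
    try:
        child = pexpect.spawn(command)
        child.expect("SUDO password:")
        output += child.before.decode("utf-8")
        child.sendline(sudo_password)
        # A short timeout here misreads slow-but-correct runs as failures;
        # give Ansible time to finish before declaring a timeout.
        child.expect(pexpect.EOF, timeout=120)
        output += child.before.decode("utf-8")
        child.close()

        if child.exitstatus:
            # Ansible echoes a distinctive marker when sudo auth fails,
            # which distinguishes a wrong password from other errors.
            if "[sudo via ansible" in output:
                return False, "sudo password incorrect"
            return False, "tailsconfig failed for another reason"
        return True, ""
    except pexpect.exceptions.TIMEOUT:
        return False, "tailsconfig took too long"
```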
2020-03-20T20:31:51Z
[]
[]
freedomofpress/securedrop
5178
freedomofpress__securedrop-5178
[ "5176" ]
29dc4ac1707dd5d0bc9ba83a8adec5e6955e1c54
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -49,29 +49,48 @@ def main(staging=False): test_otp_secret, is_admin=False) + journalist_tobe_deleted = add_test_user("clarkkent", + test_password, + test_otp_secret, + is_admin=False, + first_name="Clark", + last_name="Kent") + # Add test sources and submissions num_sources = int(os.getenv('NUM_SOURCES', 2)) - for _ in range(num_sources): + for i in range(num_sources): + if i == 0: + # For the first source, the journalist who replied will be deleted + create_source_and_submissions(journalist_who_replied=journalist_tobe_deleted) + continue create_source_and_submissions() + # Now let us delete one journalist + db.session.delete(journalist_tobe_deleted) + db.session.commit() -def add_test_user(username, password, otp_secret, is_admin=False): +def add_test_user(username, password, otp_secret, is_admin=False, + first_name="", last_name=""): try: user = Journalist(username=username, password=password, - is_admin=is_admin) + is_admin=is_admin, + first_name=first_name, + last_name=last_name) user.otp_secret = otp_secret db.session.add(user) db.session.commit() print('Test user successfully added: ' 'username={}, password={}, otp_secret={}, is_admin={}' ''.format(username, password, otp_secret, is_admin)) + return user except IntegrityError: print("Test user already added") db.session.rollback() -def create_source_and_submissions(num_submissions=2, num_replies=2): +def create_source_and_submissions(num_submissions=2, num_replies=2, + journalist_who_replied=None): # Store source in database codename = current_app.crypto_util.genrandomid() filesystem_id = current_app.crypto_util.hash_codename(codename) @@ -109,7 +128,10 @@ def create_source_and_submissions(num_submissions=2, num_replies=2): config.JOURNALIST_KEY], current_app.storage.path(source.filesystem_id, fname)) - journalist = Journalist.query.first() + if not journalist_who_replied: + journalist = Journalist.query.first() + else: + journalist = journalist_who_replied reply = Reply(journalist, source, fname) db.session.add(reply) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -278,6 +278,15 @@ def __repr__(self): def to_json(self): # type: () -> Dict[str, Union[str, int, bool]] + username = "deleted" + first_name = "" + last_name = "" + uuid = "deleted" + if self.journalist: + username = self.journalist.username + first_name = self.journalist.first_name + last_name = self.journalist.last_name + uuid = self.journalist.uuid json_submission = { 'source_url': url_for('api.single_source', source_uuid=self.source.uuid), @@ -286,10 +295,10 @@ def to_json(self): reply_uuid=self.uuid), 'filename': self.filename, 'size': self.size, - 'journalist_username': self.journalist.username, - 'journalist_first_name': self.journalist.first_name, - 'journalist_last_name': self.journalist.last_name, - 'journalist_uuid': self.journalist.uuid, + 'journalist_username': username, + 'journalist_first_name': first_name, + 'journalist_last_name': last_name, + 'journalist_uuid': uuid, 'uuid': self.uuid, 'is_deleted_by_source': self.deleted_by_source, }
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -218,6 +218,23 @@ def test_files(journalist_app, test_journo): 'replies': source.replies} [email protected](scope='function') +def test_files_deleted_journalist(journalist_app, test_journo): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + test_journo['journalist'] + juser, _ = utils.db_helper.init_journalist("f", "l", is_admin=False) + utils.db_helper.reply(juser, source, 1) + utils.db_helper.delete_journalist(juser) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions, + 'replies': source.replies} + + @pytest.fixture(scope='function') def journalist_api_token(journalist_app, test_journo): with journalist_app.test_client() as app: diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -477,6 +477,31 @@ def test_authorized_user_can_get_single_reply(journalist_app, test_files, test_files['source'].replies[0].size +def test_reply_of_deleted_journalist(journalist_app, + test_files_deleted_journalist, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files_deleted_journalist['source'].replies[0].uuid + uuid = test_files_deleted_journalist['source'].uuid + response = app.get(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + assert response.json['uuid'] == reply_uuid + assert response.json['journalist_username'] == "deleted" + assert response.json['journalist_uuid'] == "deleted" + assert response.json['journalist_first_name'] == "" + assert response.json['journalist_last_name'] == "" + assert response.json['is_deleted_by_source'] is False + assert response.json['filename'] == \ + test_files_deleted_journalist['source'].replies[0].filename + assert response.json['size'] == \ + test_files_deleted_journalist['source'].replies[0].size + + def test_authorized_user_can_delete_single_submission(journalist_app, test_submissions, journalist_api_token): diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -39,6 +39,17 @@ def init_journalist(first_name=None, last_name=None, is_admin=False): return user, user_pw +def delete_journalist(journalist): + """Deletes a journalist from the database. + + :param models.Journalist journalist: The journalist to delete + + :returns: None + """ + db.session.delete(journalist) + db.session.commit() + + def reply(journalist, source, num_replies): """Generates and submits *num_replies* replies to *source* from *journalist*. Returns reply objects as a list.
deleted journalist causes 500 on GET /replies API endpoint

## Description

A deleted journalist causes the GET /replies endpoint to 500 if there are any replies from that deleted journalist.

## Steps to Reproduce

0. Submit a document or message as a source.
1. Reply to the source from journalist account A.
2. Delete the account of journalist account A (from another admin account).
3. `GET /api/v1/replies`

## Expected Behavior

200 OK

## Actual Behavior

```
172.17.0.1 - - [31/Mar/2020 20:57:10] "GET /api/v1/replies HTTP/1.1" 500 -
Traceback (most recent call last):
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2309, in __call__
    return self.wsgi_app(environ, start_response)
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2295, in wsgi_app
    response = self.handle_exception(e)
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1741, in handle_exception
    reraise(exc_type, exc_value, tb)
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise
    raise value
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2292, in wsgi_app
    response = self.full_dispatch_request()
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1815, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1718, in handle_user_exception
    reraise(exc_type, exc_value, tb)
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise
    raise value
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1813, in full_dispatch_request
    rv = self.dispatch_request()
  File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1799, in dispatch_request
    return self.view_functions[rule.endpoint](**req.view_args)
  File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 48, in decorated_function
    return f(*args, **kwargs)
  File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 310, in get_all_replies
    {'replies': [reply.to_json() for reply in replies]}), 200
  File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 310, in <listcomp>
    {'replies': [reply.to_json() for reply in replies]}), 200
  File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/models.py", line 289, in to_json
    'journalist_username': self.journalist.username,
AttributeError: 'NoneType' object has no attribute 'username'
```

## Comments

We should handle the case where the `journalist` is `None` [here](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/models.py#L289-L292).

It would be wise to also add test data to `create-dev-data.py` for the deleted journalist scenario (since that is used for development of securedrop-client).
FYI: in my testing, the SecureDrop Client will not currently handle it gracefully if the journalist-related JSON keys are missing from the API response, or if the value is NULL. If the values are empty strings, it will create an empty record in its USERS table, but apparently operate as expected.
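The patch above resolves this by defaulting the journalist fields instead of dereferencing the deleted record, which also matches the client constraint noted above (empty strings rather than missing or NULL values for the name fields). The serialization logic, restated schematically as a standalone function (field names follow the patch; the "deleted" placeholders match what it returns):

```python
def reply_to_json(reply):
    # Sketch of the None-safe serialization from the patch above.
    # reply.journalist is None once the journalist account is deleted.
    journalist = reply.journalist
    return {
        "uuid": reply.uuid,
        "filename": reply.filename,
        "size": reply.size,
        # Fall back to placeholders so clients never see a missing key.
        "journalist_username": journalist.username if journalist else "deleted",
        "journalist_first_name": journalist.first_name if journalist else "",
        "journalist_last_name": journalist.last_name if journalist else "",
        "journalist_uuid": journalist.uuid if journalist else "deleted",
        "is_deleted_by_source": reply.deleted_by_source,
    }
```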
2020-04-01T10:50:27Z
[]
[]
freedomofpress/securedrop
5184
freedomofpress__securedrop-5184
[ "5183" ]
8576e0bb51468ca70c92f1faf6e58f000851a6c8
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -58,12 +58,14 @@ def main(staging=False): # Add test sources and submissions num_sources = int(os.getenv('NUM_SOURCES', 2)) - for i in range(num_sources): - if i == 0: + for i in range(1, num_sources + 1): + if i == 1: # For the first source, the journalist who replied will be deleted - create_source_and_submissions(journalist_who_replied=journalist_tobe_deleted) + create_source_and_submissions( + i, num_sources, journalist_who_replied=journalist_tobe_deleted + ) continue - create_source_and_submissions() + create_source_and_submissions(i, num_sources) # Now let us delete one journalist db.session.delete(journalist_tobe_deleted) db.session.commit() @@ -89,8 +91,9 @@ def add_test_user(username, password, otp_secret, is_admin=False, db.session.rollback() -def create_source_and_submissions(num_submissions=2, num_replies=2, - journalist_who_replied=None): +def create_source_and_submissions( + source_index, source_count, num_submissions=2, num_replies=2, journalist_who_replied=None +): # Store source in database codename = current_app.crypto_util.genrandomid() filesystem_id = current_app.crypto_util.hash_codename(codename) @@ -124,7 +127,7 @@ def create_source_and_submissions(num_submissions=2, num_replies=2, source.journalist_filename) current_app.crypto_util.encrypt( next(replies), - [current_app.crypto_util.getkey(source.filesystem_id), + [current_app.crypto_util.get_fingerprint(source.filesystem_id), config.JOURNALIST_KEY], current_app.storage.path(source.filesystem_id, fname)) @@ -137,9 +140,13 @@ def create_source_and_submissions(num_submissions=2, num_replies=2, db.session.commit() - print("Test source (codename: '{}', journalist designation '{}') " - "added with {} submissions and {} replies".format( - codename, journalist_designation, num_submissions, num_replies)) + print( + "Test source {}/{} (codename: '{}', journalist designation '{}') " + "added with {} submissions and {} replies".format( + source_index, source_count, codename, journalist_designation, + num_submissions, num_replies + ) + ) if __name__ == "__main__": # pragma: no cover diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import collections from distutils.version import StrictVersion import pretty_bad_protocol as gnupg import os @@ -12,6 +11,7 @@ from datetime import date from flask import current_app from pretty_bad_protocol._util import _is_stream, _make_binary_stream +from redis import Redis import rm @@ -55,32 +55,6 @@ def monkey_patch_delete_handle_status(self, key, value): gnupg._parsers.DeleteResult._handle_status = monkey_patch_delete_handle_status -class FIFOCache(): - """ - We implemented this simple cache instead of using functools.lru_cache - (this uses a different cache replacement policy (FIFO), but either - FIFO or LRU works for our key fingerprint cache) - due to the inability to remove an item from its cache. 
- - See: https://bugs.python.org/issue28178 - """ - def __init__(self, maxsize: int): - self.maxsize = maxsize - self.cache = collections.OrderedDict() # type: collections.OrderedDict - - def get(self, item): - if item in self.cache: - return self.cache[item] - - def put(self, item, value): - self.cache[item] = value - if len(self.cache) > self.maxsize: - self.cache.popitem(last=False) - - def delete(self, item): - del self.cache[item] - - class CryptoException(Exception): pass @@ -99,8 +73,8 @@ class CryptoUtil: # to set an expiration date. DEFAULT_KEY_EXPIRATION_DATE = '0' - keycache_limit = 1000 - keycache = FIFOCache(keycache_limit) + REDIS_FINGERPRINT_HASH = "sd/crypto-util/fingerprints" + REDIS_KEY_HASH = "sd/crypto-util/keys" def __init__(self, scrypt_params, @@ -114,7 +88,7 @@ def __init__(self, self.__securedrop_root = securedrop_root self.__word_list = word_list - if os.environ.get('SECUREDROP_ENV') == 'test': + if os.environ.get('SECUREDROP_ENV') in ('dev', 'test'): # Optimize crypto to speed up tests (at the expense of security # DO NOT use these settings in production) self.__gpg_key_length = 1024 @@ -148,6 +122,8 @@ def __init__(self, with io.open(adjectives_file) as f: self.adjectives = f.read().splitlines() + self.redis = Redis(decode_responses=True) + # Make sure these pass before the app can run def do_runtime_tests(self): if self.scrypt_id_pepper == self.scrypt_gpg_pepper: @@ -243,7 +219,7 @@ def genkeypair(self, name, secret): return genkey_obj def delete_reply_keypair(self, source_filesystem_id): - key = self.getkey(source_filesystem_id) + key = self.get_fingerprint(source_filesystem_id) # If this source was never flagged for review, they won't have a reply # keypair if not key: @@ -254,29 +230,45 @@ def delete_reply_keypair(self, source_filesystem_id): temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) # The subkeys keyword argument deletes both secret and public keys. temp_gpg.delete_keys(key, secret=True, subkeys=True) - self.keycache.delete(source_filesystem_id) + self.redis.hdel(self.REDIS_KEY_HASH, self.get_fingerprint(source_filesystem_id)) + self.redis.hdel(self.REDIS_FINGERPRINT_HASH, source_filesystem_id) + + def get_fingerprint(self, name): + """ + Returns the fingerprint of the GPG key for the given name. - def getkey(self, name): - fingerprint = self.keycache.get(name) - if fingerprint: # cache hit + The supplied name is usually a source filesystem ID. + """ + fingerprint = self.redis.hget(self.REDIS_FINGERPRINT_HASH, name) + if fingerprint: return fingerprint - # cache miss for key in self.gpg.list_keys(): for uid in key['uids']: if name in uid: - self.keycache.put(name, key['fingerprint']) + self.redis.hset(self.REDIS_FINGERPRINT_HASH, name, key['fingerprint']) return key['fingerprint'] return None - def export_pubkey(self, name): - fingerprint = self.getkey(name) - if fingerprint: - return self.gpg.export_keys(fingerprint) - else: + def get_pubkey(self, name): + """ + Returns the GPG public key for the given name. + + The supplied name is usually a source filesystem ID. 
+ """ + fingerprint = self.get_fingerprint(name) + if not fingerprint: return None + key = self.redis.hget(self.REDIS_KEY_HASH, fingerprint) + if key: + return key + + key = self.gpg.export_keys(fingerprint) + self.redis.hset(self.REDIS_KEY_HASH, fingerprint, key) + return key + def encrypt(self, plaintext, fingerprints, output=None): # Verify the output path if output: diff --git a/securedrop/journalist.py b/securedrop/journalist.py --- a/securedrop/journalist.py +++ b/securedrop/journalist.py @@ -16,7 +16,7 @@ def prime_keycache(): """ with app.app_context(): for source in Source.query.filter_by(pending=False).all(): - app.crypto_util.getkey(source.filesystem_id) + app.crypto_util.get_pubkey(source.filesystem_id) prime_keycache() diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -33,7 +33,7 @@ def remove_star(filesystem_id): def col(filesystem_id): form = ReplyForm() source = get_source(filesystem_id) - source.has_key = current_app.crypto_util.getkey(filesystem_id) + source.has_key = current_app.crypto_util.get_fingerprint(filesystem_id) return render_template("col.html", filesystem_id=filesystem_id, source=source, form=form) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -108,7 +108,7 @@ def reply(): g.source.journalist_filename) current_app.crypto_util.encrypt( form.message.data, - [current_app.crypto_util.getkey(g.filesystem_id), + [current_app.crypto_util.get_fingerprint(g.filesystem_id), config.JOURNALIST_KEY], output=current_app.storage.path(g.filesystem_id, filename), ) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -118,7 +118,7 @@ def collection(self): @property def fingerprint(self): - return current_app.crypto_util.getkey(self.filesystem_id) + return current_app.crypto_util.get_fingerprint(self.filesystem_id) @fingerprint.setter def fingerprint(self, value): @@ -131,7 +131,7 @@ def fingerprint(self): @property def public_key(self): # type: () -> str - return current_app.crypto_util.export_pubkey(self.filesystem_id) + return current_app.crypto_util.get_pubkey(self.filesystem_id) @public_key.setter def public_key(self, value): diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -130,7 +130,7 @@ def lookup(): # Generate a keypair to encrypt replies from the journalist # Only do this if the journalist has flagged the source as one # that they would like to reply to. (Issue #140.) - if not current_app.crypto_util.getkey(g.filesystem_id) and \ + if not current_app.crypto_util.get_fingerprint(g.filesystem_id) and \ g.source.flagged: db_uri = current_app.config['SQLALCHEMY_DATABASE_URI'] async_genkey(current_app.crypto_util, @@ -145,7 +145,7 @@ def lookup(): replies=replies, flagged=g.source.flagged, new_user=session.get('new_user', None), - haskey=current_app.crypto_util.getkey( + haskey=current_app.crypto_util.get_fingerprint( g.filesystem_id)) @view.route('/submit', methods=('POST',))
diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -92,8 +92,10 @@ def test_apache_headers_journalist_interface(host, header): securedrop_test_vars.apache_listening_address), "WSGIDaemonProcess journalist processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format( # noqa securedrop_test_vars.securedrop_code), - 'WSGIProcessGroup journalist', - 'WSGIScriptAlias / /var/www/journalist.wsgi', + ( + 'WSGIScriptAlias / /var/www/journalist.wsgi ' + 'process-group=journalist application-group=journalist' + ), 'WSGIPassAuthorization On', 'Header set Cache-Control "no-store"', "Alias /static {}/static".format(securedrop_test_vars.securedrop_code), diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py --- a/securedrop/tests/functional/functional_test.py +++ b/securedrop/tests/functional/functional_test.py @@ -307,7 +307,7 @@ def wait_for_source_key(self, source_name): filesystem_id = self.source_app.crypto_util.hash_codename(source_name) def key_available(filesystem_id): - assert self.source_app.crypto_util.getkey(filesystem_id) + assert self.source_app.crypto_util.get_fingerprint(filesystem_id) self.wait_for(lambda: key_available(filesystem_id), timeout=60) diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -11,7 +11,7 @@ import crypto_util import models -from crypto_util import CryptoUtil, CryptoException, FIFOCache +from crypto_util import CryptoUtil, CryptoException from db import db @@ -40,7 +40,7 @@ def test_encrypt_success(source_app, config, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -70,7 +70,7 @@ def test_encrypt_without_output(source_app, config, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY]) plaintext = source_app.crypto_util.decrypt( test_source['codename'], @@ -96,7 +96,7 @@ def test_encrypt_binary_stream(source_app, config, test_source): with io.open(os.path.realpath(__file__)) as fh: ciphertext = source_app.crypto_util.encrypt( fh, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -116,7 +116,7 @@ def test_encrypt_fingerprints_not_a_list_or_tuple(source_app, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - source_app.crypto_util.getkey(test_source['filesystem_id']), + source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) plaintext = source_app.crypto_util.decrypt(test_source['codename'], @@ -133,7 +133,7 @@ def 
test_basic_encrypt_then_decrypt_multiple_recipients(source_app, with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -208,7 +208,7 @@ def test_genkeypair(source_app): db.session.commit() source_app.crypto_util.genkeypair(source.filesystem_id, codename) - assert source_app.crypto_util.getkey(filesystem_id) is not None + assert source_app.crypto_util.get_fingerprint(filesystem_id) is not None def parse_gpg_date_string(date_string): @@ -241,15 +241,15 @@ def test_reply_keypair_creation_and_expiration_dates(source_app): db.session.commit() source_app.crypto_util.genkeypair(source.filesystem_id, codename) - # crypto_util.getkey only returns the fingerprint of the key. We need + # crypto_util.get_fingerprint only returns the fingerprint of the key. We need # the full output of gpg.list_keys() to check the creation and # expire dates. # - # TODO: it might be generally useful to refactor crypto_util.getkey so + # TODO: it might be generally useful to refactor crypto_util.get_fingerprint so # it always returns the entire key dictionary instead of just the # fingerprint (which is always easily extracted from the entire key # dictionary). - new_key_fingerprint = source_app.crypto_util.getkey(filesystem_id) + new_key_fingerprint = source_app.crypto_util.get_fingerprint(filesystem_id) new_key = [key for key in source_app.crypto_util.gpg.list_keys() if new_key_fingerprint == key['fingerprint']][0] @@ -267,7 +267,7 @@ def test_reply_keypair_creation_and_expiration_dates(source_app): def test_delete_reply_keypair(source_app, test_source): fid = test_source['filesystem_id'] source_app.crypto_util.delete_reply_keypair(fid) - assert source_app.crypto_util.getkey(fid) is None + assert source_app.crypto_util.get_fingerprint(fid) is None def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source, @@ -285,7 +285,7 @@ def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source captured = capsys.readouterr() assert "ValueError: Unknown status message: 'PINENTRY_LAUNCHED'" not in captured.err - assert source_app.crypto_util.getkey(fid) is None + assert source_app.crypto_util.get_fingerprint(fid) is None def test_delete_reply_keypair_no_key(source_app): @@ -295,25 +295,24 @@ def test_delete_reply_keypair_no_key(source_app): source_app.crypto_util.delete_reply_keypair('Reality Winner') -def test_getkey(source_app, test_source): - assert (source_app.crypto_util.getkey(test_source['filesystem_id']) +def test_get_fingerprint(source_app, test_source): + assert (source_app.crypto_util.get_fingerprint(test_source['filesystem_id']) is not None) # check that a non-existent key returns None - assert source_app.crypto_util.getkey('x' * 50) is None + assert source_app.crypto_util.get_fingerprint('x' * 50) is None -def test_export_pubkey(source_app, test_source): +def test_get_pubkey(source_app, test_source): begin_pgp = '-----BEGIN PGP PUBLIC KEY BLOCK----' # check that a filesystem_id exports the pubkey - exported = source_app.crypto_util.export_pubkey( - test_source['filesystem_id']) - assert exported.startswith(begin_pgp) + pubkey = source_app.crypto_util.get_pubkey(test_source['filesystem_id']) + assert pubkey.startswith(begin_pgp) # check that a non-existent identifer exports None - exported = 
source_app.crypto_util.export_pubkey('x' * 50) - assert exported is None + pubkey = source_app.crypto_util.get_pubkey('x' * 50) + assert pubkey is None @given( @@ -343,25 +342,3 @@ def test_encrypt_then_decrypt_gives_same_result( decrypted_text = crypto.decrypt(secret, ciphertext) assert decrypted_text == message - - -def test_fifo_cache(): - cache = FIFOCache(3) - - cache.put('item 1', 1) - cache.put('item 2', 2) - cache.put('item 3', 3) - - assert cache.get('item 1') == 1 - assert cache.get('item 2') == 2 - assert cache.get('item 3') == 3 - - cache.put('item 4', 4) - # Maxsize is 3, so adding item 4 should kick out item 1 - assert not cache.get('item 1') - assert cache.get('item 2') == 2 - assert cache.get('item 3') == 3 - assert cache.get('item 4') == 4 - - cache.delete('item 2') - assert not cache.get('item 2') diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -304,7 +304,7 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, # Block up to 15s for the reply keypair, so we can test sending a reply def assertion(): - assert current_app.crypto_util.getkey(filesystem_id) is not None + assert current_app.crypto_util.get_fingerprint(filesystem_id) is not None utils.asynchronous.wait_for_assertion(assertion, 15) # Create 2 replies to test deleting on journalist and source interface diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1691,7 +1691,7 @@ def test_delete_source_deletes_source_key(journalist_app, utils.db_helper.reply(journo, source, 2) # Source key exists - source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['filesystem_id']) assert source_key is not None @@ -1699,7 +1699,7 @@ def test_delete_source_deletes_source_key(journalist_app, test_source['filesystem_id']) # Source key no longer exists - source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['filesystem_id']) assert source_key is None diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -652,7 +652,7 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, # First we must encrypt the reply, or it will get rejected # by the server. - source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['source'].filesystem_id) reply_content = current_app.crypto_util.gpg.encrypt( 'This is a plaintext reply', source_key).data diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -71,7 +71,7 @@ def reply(journalist, source, num_replies): source.journalist_filename) current_app.crypto_util.encrypt( str(os.urandom(1)), - [current_app.crypto_util.getkey(source.filesystem_id), + [current_app.crypto_util.get_fingerprint(source.filesystem_id), config.JOURNALIST_KEY], current_app.storage.path(source.filesystem_id, fname))
Journalist API endpoint `get_all_sources` is still too slow ## Description The `get_all_sources` endpoint of the journalist API is still prohibitively slow on instances with large numbers of sources. Caching source public keys would reduce the response time. In addition, the current limit of 1000 entries for the key cache may not be enough for larger installations; given the relatively small amount of memory these caches require, we should raise that limit.
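A minimal sketch of the kind of bounded in-memory key cache being discussed, matching the `put`/`get`/`delete` interface exercised by the `test_fifo_cache` test removed in the patch above; the FIFO eviction policy shown here and the `fingerprint_cache` name and size are illustrative assumptions, not the project's actual implementation:

```python
from collections import OrderedDict


class FIFOCache:
    """A bounded cache with first-in, first-out eviction: once maxsize
    entries are stored, inserting a new key evicts the oldest one."""

    def __init__(self, maxsize):
        self.maxsize = maxsize
        self._store = OrderedDict()

    def get(self, key):
        # Return the cached value, or None on a miss.
        return self._store.get(key)

    def put(self, key, value):
        # Evict the oldest insertion only when adding a genuinely new key.
        if key not in self._store and len(self._store) >= self.maxsize:
            self._store.popitem(last=False)
        self._store[key] = value

    def delete(self, key):
        # Drop a key, e.g. after a source and its GPG key are deleted.
        self._store.pop(key, None)


# Raising the cache limit mentioned in the issue is then a one-line
# change; the size below is an illustrative assumption.
fingerprint_cache = FIFOCache(5000)
```

A FIFO policy keeps the sketch simple; an LRU policy would retain frequently requested keys longer, at the cost of extra bookkeeping on every `get`.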
2020-04-01T20:05:35Z
[]
[]
freedomofpress/securedrop
5203
freedomofpress__securedrop-5203
[ "4929" ]
2350afb6d456dd278847159b54c77723de850c5d
diff --git a/admin/bootstrap.py b/admin/bootstrap.py new file mode 100755 --- /dev/null +++ b/admin/bootstrap.py @@ -0,0 +1,284 @@ +# -*- mode: python; coding: utf-8 -*- +# +# Copyright (C) 2013-2018 Freedom of the Press Foundation & al +# Copyright (C) 2018 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# + +import argparse +import logging +import os +import shutil +import subprocess +import sys + +sdlog = logging.getLogger(__name__) + +DIR = os.path.dirname(os.path.realpath(__file__)) +VENV_DIR = os.path.join(DIR, ".venv3") + + +def setup_logger(verbose=False): + """ Configure logging handler """ + # Set default level on parent + sdlog.setLevel(logging.DEBUG) + level = logging.DEBUG if verbose else logging.INFO + + stdout = logging.StreamHandler(sys.stdout) + stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s')) + stdout.setLevel(level) + sdlog.addHandler(stdout) + + +def run_command(command): + """ + Wrapper function to display stdout for running command, + similar to how shelling out in a Bash script displays rolling output. + + Yields a list of the stdout from the `command`, and raises a + CalledProcessError if `command` returns non-zero. + """ + popen = subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + for stdout_line in iter(popen.stdout.readline, b""): + yield stdout_line + popen.stdout.close() + return_code = popen.wait() + if return_code: + raise subprocess.CalledProcessError(return_code, command) + + +def is_tails(): + try: + id = subprocess.check_output('lsb_release --id --short', + shell=True).decode('utf-8').strip() + except subprocess.CalledProcessError: + id = None + + # dirty hack to unreliably detect Tails 4.0~beta2 + if id == 'Debian': + if os.uname()[1] == 'amnesia': + id = 'Tails' + + return id == 'Tails' + + +def clean_up_tails3_venv(virtualenv_dir=VENV_DIR): + """ + Tails 3.x, based on debian stretch uses libpython3.5, whereas Tails 4.x is + based on Debian Buster and uses libpython3.7. This means that the Tails 3.x + virtualenv will not work under Tails 4.x, and will need to be destroyed and + rebuilt. We can detect if the version of libpython is 3.5 in the + admin/.venv3/ folder, and delete it if that's the case. This will ensure a + smooth upgrade from Tails 3.x to Tails 4.x. + """ + if is_tails(): + try: + dist = subprocess.check_output('lsb_release --codename --short', + shell=True).strip() + except subprocess.CalledProcessError: + dist = None + + # tails4 is based on buster + if dist == b'buster': + python_lib_path = os.path.join(virtualenv_dir, "lib/python3.5") + if os.path.exists(os.path.join(python_lib_path)): + sdlog.info( + "Tails 3 Python 3 virtualenv detected. " + "Removing it." 
+                ) +                shutil.rmtree(virtualenv_dir) +                sdlog.info("Tails 3 Python 3 virtualenv deleted.") + + +def checkenv(args): + clean_up_tails3_venv(VENV_DIR) + if not os.path.exists(os.path.join(VENV_DIR, "bin/activate")): + sdlog.error('Please run "securedrop-admin setup".') + sys.exit(1) + + +def maybe_torify(): + if is_tails(): + return ['torify'] + else: + return [] + + +def install_apt_dependencies(args): + """ + Install apt dependencies in Tails. Before Ansible can be installed in + a virtualenv, a number of Python prerequisites must be in place. + """ + sdlog.info("Installing SecureDrop Admin dependencies") + sdlog.info(("You'll be prompted for the temporary Tails admin password," + " which was set on Tails login screen")) + + apt_command = ['sudo', 'su', '-c', + "apt-get update && \ + apt-get -q -o=Dpkg::Use-Pty=0 install -y \ + python3-virtualenv \ + python3-yaml \ + python3-pip \ + ccontrol \ + virtualenv \ + libffi-dev \ + libssl-dev \ + libpython3-dev", + ] + + try: + # Print command results in real-time, to keep the Admin apprised + # of progress during the long-running command. + for output_line in run_command(apt_command): + print(output_line.decode('utf-8').rstrip()) + except subprocess.CalledProcessError: + # Tails supports apt persistence, which was used by SecureDrop + # under Tails 2.x. If updates are being applied, don't try to pile + # on with more apt requests. + sdlog.error(("Failed to install apt dependencies. Check network" + " connection and try again.")) + raise + + +def envsetup(args, virtualenv_dir=VENV_DIR): + """Installs Admin tooling required for managing SecureDrop. Specifically: + + * updates apt-cache + * installs apt packages for Python virtualenv + * creates virtualenv + * installs pip packages inside virtualenv + + The virtualenv is created within the Persistence volume in Tails, so that + Ansible is available to the Admin on subsequent boots without requiring + installation of packages again. + """ + # clean up tails 3.x venv when migrating to tails 4.x + clean_up_tails3_venv(virtualenv_dir) + + # virtualenv doesn't exist? Install dependencies and create it + if not os.path.exists(virtualenv_dir): + + install_apt_dependencies(args) + + # Technically you can create a virtualenv from within Python, + # but pip can only be run over Tor on Tails, and debugging that + # along with installing a third-party dependency is not worth + # the effort here. + sdlog.info("Setting up virtualenv") + try: + sdlog.debug(subprocess.check_output( + maybe_torify() + ['virtualenv', + '--python=python3', + virtualenv_dir + ], + stderr=subprocess.STDOUT)) + except subprocess.CalledProcessError as e: + sdlog.debug(e.output) + sdlog.error(("Unable to create virtualenv. 
Check network settings" + " and try again.")) + sdlog.debug("Cleaning up virtualenv") + if os.path.exists(virtualenv_dir): + shutil.rmtree(virtualenv_dir) + raise + else: + sdlog.info("Virtualenv already exists, not creating") + + install_pip_dependencies(args) + if os.path.exists(os.path.join(DIR, 'setup.py')): + install_pip_self(args) + + sdlog.info("Finished installing SecureDrop dependencies") + + +def install_pip_self(args): + pip_install_cmd = [ + os.path.join(VENV_DIR, 'bin', 'pip3'), + 'install', '-e', DIR + ] + try: + subprocess.check_output(maybe_torify() + pip_install_cmd, + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + sdlog.debug(e.output) + sdlog.error("Unable to install self, run with -v for more information") + raise + + +def install_pip_dependencies(args, pip_install_cmd=[ + os.path.join(VENV_DIR, 'bin', 'pip3'), + 'install', + '--no-deps', + # Specify requirements file. + '-r', os.path.join(DIR, 'requirements.txt'), + '--require-hashes', + # Make sure to upgrade packages only if necessary. + '-U', '--upgrade-strategy', 'only-if-needed', +]): + """ + Install Python dependencies via pip into virtualenv. + """ + + sdlog.info("Checking Python dependencies for securedrop-admin") + try: + pip_output = subprocess.check_output(maybe_torify() + pip_install_cmd, + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + sdlog.debug(e.output) + sdlog.error(("Failed to install pip dependencies. Check network" + " connection and try again.")) + raise + + sdlog.debug(pip_output) + if "Successfully installed" in str(pip_output): + sdlog.info("Python dependencies for securedrop-admin upgraded") + else: + sdlog.info("Python dependencies for securedrop-admin are up-to-date") + + +def parse_argv(argv): + parser = argparse.ArgumentParser() + parser.add_argument('-v', action='store_true', default=False, + help="Increase verbosity on output") + parser.set_defaults(func=envsetup) + + subparsers = parser.add_subparsers() + + envsetup_parser = subparsers.add_parser( + 'envsetup', + help='Set up the admin virtualenv.' + ) + envsetup_parser.set_defaults(func=envsetup) + + checkenv_parser = subparsers.add_parser( + 'checkenv', + help='Check that the admin virtualenv is properly set up.' + ) + checkenv_parser.set_defaults(func=checkenv) + + return parser.parse_args(argv) + + +if __name__ == "__main__": + args = parse_argv(sys.argv[1:]) + setup_logger(args.v) + + try: + args.func(args) + except Exception: + sys.exit(1) + else: + sys.exit(0) diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py new file mode 100755 --- /dev/null +++ b/admin/securedrop_admin/__init__.py @@ -0,0 +1,989 @@ +# -*- mode: python; coding: utf-8 -*- +# +# Copyright (C) 2013-2018 Freedom of the Press Foundation & al +# Copyright (C) 2018 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +""" +SecureDrop Admin Toolkit. 
+ +For use by administrators to install, maintain, and manage their SD +instances. +""" + +import argparse +import logging +import os +import io +import re +import subprocess +import sys +import json +import base64 +import prompt_toolkit +from prompt_toolkit.validation import Validator, ValidationError +import yaml +from pkg_resources import parse_version +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import x25519 + +sdlog = logging.getLogger(__name__) +RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77' +DEFAULT_KEYSERVER = 'hkps://keys.openpgp.org' +SUPPORT_ONION_URL = 'http://support6kv2242qx.onion' +SUPPORT_URL = 'https://support.freedom.press' +EXIT_SUCCESS = 0 +EXIT_SUBPROCESS_ERROR = 1 +EXIT_INTERRUPT = 2 + + +class FingerprintException(Exception): + pass + + +class JournalistAlertEmailException(Exception): + pass + + +class SiteConfig(object): + + class ValidateNotEmpty(Validator): + def validate(self, document): + if document.text != '': + return True + raise ValidationError( + message="Must not be an empty string") + + class ValidateTime(Validator): + def validate(self, document): + if document.text.isdigit() and int(document.text) in range(0, 24): + return True + raise ValidationError( + message="Must be an integer between 0 and 23") + + class ValidateUser(Validator): + def validate(self, document): + text = document.text + if text != '' and text != 'root' and text != 'amnesia': + return True + raise ValidationError( + message="Must not be root, amnesia or an empty string") + + class ValidateIP(Validator): + def validate(self, document): + if re.match(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', + document.text): + return True + raise ValidationError( + message="An IP address must be something like 10.240.20.83") + + class ValidatePath(Validator): + def __init__(self, basedir): + self.basedir = basedir + super(SiteConfig.ValidatePath, self).__init__() + + def validate(self, document): + if document.text == '': + raise ValidationError( + message='an existing file name is required') + path = os.path.join(self.basedir, document.text) + if os.path.exists(path): + return True + raise ValidationError( + message=path + ' file does not exist') + + class ValidateOptionalPath(ValidatePath): + def validate(self, document): + if document.text == '': + return True + return super(SiteConfig.ValidateOptionalPath, self).validate( + document) + + class ValidateYesNo(Validator): + def validate(self, document): + text = document.text.lower() + if text == 'yes' or text == 'no': + return True + raise ValidationError(message="Must be either yes or no") + + class ValidateYesNoForV3(Validator): + + def __init__(self, *args, **kwargs): + Validator.__init__(*args, **kwargs) + self.caller = args[0] + + def validate(self, document): + text = document.text.lower() + # Raise error if admin tries to disable v3 when v2 + # is already disabled. 
+ if text == 'no' and \ + not self.caller._config_in_progress.get("v2_onion_services"): # noqa: E501 + raise ValidationError(message="Since you disabled v2 onion services, you must enable v3 onion services.") # noqa: E501 + if text == 'yes' or text == 'no': + return True + raise ValidationError(message="Must be either yes or no") + + class ValidateFingerprint(Validator): + def validate(self, document): + text = document.text.replace(' ', '') + if text == '65A1B5FF195B56353CC63DFFCC40EF1228271441': + raise ValidationError( + message='This is the TEST journalist fingerprint') + if text == '600BC6D5142C68F35DDBCEA87B597104EDDDC102': + raise ValidationError( + message='This is the TEST admin fingerprint') + if not re.match('[a-fA-F0-9]{40}$', text): + raise ValidationError( + message='fingerprints must be 40 hexadecimal characters') + return True + + class ValidateOptionalFingerprint(ValidateFingerprint): + def validate(self, document): + if document.text == '': + return True + return super(SiteConfig.ValidateOptionalFingerprint, + self).validate(document) + + class ValidateInt(Validator): + def validate(self, document): + if re.match(r'\d+$', document.text): + return True + raise ValidationError(message="Must be an integer") + + class Locales(object): + def __init__(self, appdir): + self.translation_dir = os.path.realpath( + os.path.join(appdir, 'translations')) + + def get_translations(self): + translations = set(['en_US']) + for dirname in os.listdir(self.translation_dir): + if dirname != 'messages.pot': + translations.add(dirname) + return translations + + class ValidateLocales(Validator): + def __init__(self, basedir): + self.basedir = basedir + super(SiteConfig.ValidateLocales, self).__init__() + + def validate(self, document): + desired = document.text.split() + existing = SiteConfig.Locales(self.basedir).get_translations() + missing = set(desired) - set(existing) + if not missing: + return True + raise ValidationError( + message="The following locales do not exist " + " ".join( + missing)) + + class ValidateOSSECUsername(Validator): + def validate(self, document): + text = document.text + if text and '@' not in text and 'test' != text: + return True + raise ValidationError( + message="The SASL username should not include the domain name") + + class ValidateOSSECPassword(Validator): + def validate(self, document): + text = document.text + if len(text) >= 8 and 'password123' != text: + return True + raise ValidationError( + message="Password for OSSEC email account must be strong") + + class ValidateEmail(Validator): + def validate(self, document): + text = document.text + if text == '': + raise ValidationError( + message=("Must not be empty")) + if '@' not in text: + raise ValidationError( + message=("Must contain a @")) + return True + + class ValidateOSSECEmail(ValidateEmail): + def validate(self, document): + super(SiteConfig.ValidateOSSECEmail, self).validate(document) + text = document.text + if '[email protected]' != text: + return True + raise ValidationError( + message=("Must be set to something other than " + "[email protected]")) + + class ValidateOptionalEmail(ValidateEmail): + def validate(self, document): + if document.text == '': + return True + return super(SiteConfig.ValidateOptionalEmail, self).validate( + document) + + def __init__(self, args): + self.args = args + self.config = {} + # Hold runtime configuration before save, to support + # referencing other responses during validation + self._config_in_progress = {} + translations = SiteConfig.Locales( + 
self.args.app_path).get_translations() + translations = " ".join(translations) + self.desc = [ + ['ssh_users', 'sd', str, + u'Username for SSH access to the servers', + SiteConfig.ValidateUser(), + None, + lambda config: True], + ['daily_reboot_time', 4, int, + u'Daily reboot time of the server (24-hour clock)', + SiteConfig.ValidateTime(), + int, + lambda config: True], + ['app_ip', '10.20.2.2', str, + u'Local IPv4 address for the Application Server', + SiteConfig.ValidateIP(), + None, + lambda config: True], + ['monitor_ip', '10.20.3.2', str, + u'Local IPv4 address for the Monitor Server', + SiteConfig.ValidateIP(), + None, + lambda config: True], + ['app_hostname', 'app', str, + u'Hostname for Application Server', + SiteConfig.ValidateNotEmpty(), + None, + lambda config: True], + ['monitor_hostname', 'mon', str, + u'Hostname for Monitor Server', + SiteConfig.ValidateNotEmpty(), + None, + lambda config: True], + ['dns_server', '8.8.8.8', str, + u'DNS server specified during installation', + SiteConfig.ValidateNotEmpty(), + None, + lambda config: True], + ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str, + u'Local filepath to public key for ' + 'SecureDrop Application GPG public key', + SiteConfig.ValidatePath(self.args.ansible_path), + None, + lambda config: True], + ['securedrop_app_https_on_source_interface', False, bool, + u'Whether HTTPS should be enabled on ' + 'Source Interface (requires EV cert)', + SiteConfig.ValidateYesNo(), + lambda x: x.lower() == 'yes', + lambda config: True], + ['securedrop_app_https_certificate_cert_src', '', str, + u'Local filepath to HTTPS certificate', + SiteConfig.ValidateOptionalPath(self.args.ansible_path), + None, + lambda config: config.get( + 'securedrop_app_https_on_source_interface')], + ['securedrop_app_https_certificate_key_src', '', str, + u'Local filepath to HTTPS certificate key', + SiteConfig.ValidateOptionalPath(self.args.ansible_path), + None, + lambda config: config.get( + 'securedrop_app_https_on_source_interface')], + ['securedrop_app_https_certificate_chain_src', '', str, + u'Local filepath to HTTPS certificate chain file', + SiteConfig.ValidateOptionalPath(self.args.ansible_path), + None, + lambda config: config.get( + 'securedrop_app_https_on_source_interface')], + ['securedrop_app_gpg_fingerprint', '', str, + u'Full fingerprint for the SecureDrop Application GPG Key', + SiteConfig.ValidateFingerprint(), + self.sanitize_fingerprint, + lambda config: True], + ['ossec_alert_gpg_public_key', 'ossec.pub', str, + u'Local filepath to OSSEC alerts GPG public key', + SiteConfig.ValidatePath(self.args.ansible_path), + None, + lambda config: True], + ['ossec_gpg_fpr', '', str, + u'Full fingerprint for the OSSEC alerts GPG public key', + SiteConfig.ValidateFingerprint(), + self.sanitize_fingerprint, + lambda config: True], + ['ossec_alert_email', '', str, + u'Admin email address for receiving OSSEC alerts', + SiteConfig.ValidateOSSECEmail(), + None, + lambda config: True], + ['journalist_alert_gpg_public_key', '', str, + u'Local filepath to journalist alerts GPG public key (optional)', + SiteConfig.ValidateOptionalPath(self.args.ansible_path), + None, + lambda config: True], + ['journalist_gpg_fpr', '', str, + u'Full fingerprint for the journalist alerts ' + u'GPG public key (optional)', + SiteConfig.ValidateOptionalFingerprint(), + self.sanitize_fingerprint, + lambda config: config.get('journalist_alert_gpg_public_key')], + ['journalist_alert_email', '', str, + u'Email address for receiving journalist alerts (optional)', + 
SiteConfig.ValidateOptionalEmail(), + None, + lambda config: config.get('journalist_alert_gpg_public_key')], + ['smtp_relay', "smtp.gmail.com", str, + u'SMTP relay for sending OSSEC alerts', + SiteConfig.ValidateNotEmpty(), + None, + lambda config: True], + ['smtp_relay_port', 587, int, + u'SMTP port for sending OSSEC alerts', + SiteConfig.ValidateInt(), + int, + lambda config: True], + ['sasl_domain', "gmail.com", str, + u'SASL domain for sending OSSEC alerts', + None, + None, + lambda config: True], + ['sasl_username', '', str, + u'SASL username for sending OSSEC alerts', + SiteConfig.ValidateOSSECUsername(), + None, + lambda config: True], + ['sasl_password', '', str, + u'SASL password for sending OSSEC alerts', + SiteConfig.ValidateOSSECPassword(), + None, + lambda config: True], + ['enable_ssh_over_tor', True, bool, + u'Enable SSH over Tor (recommended, disables SSH over LAN). ' + u'If you respond no, SSH will be available over LAN only', + SiteConfig.ValidateYesNo(), + lambda x: x.lower() == 'yes', + lambda config: True], + ['securedrop_supported_locales', [], list, + u'Space separated list of additional locales to support ' + '(' + translations + ')', + SiteConfig.ValidateLocales(self.args.app_path), + str.split, + lambda config: True], + ['v2_onion_services', self.check_for_v2_onion(), bool, + u'Do you want to enable v2 onion services (recommended only for SecureDrop instances installed before 1.0.0)?', # noqa: E501 + SiteConfig.ValidateYesNo(), + lambda x: x.lower() == 'yes', + lambda config: True], + ['v3_onion_services', self.check_for_v3_onion, bool, + u'Do you want to enable v3 onion services (recommended)?', + SiteConfig.ValidateYesNoForV3(self), + lambda x: x.lower() == 'yes', + lambda config: True], + ] + + def load_and_update_config(self): + if self.exists(): + self.config = self.load() + + return self.update_config() + + def update_config(self): + self.config.update(self.user_prompt_config()) + self.save() + self.validate_gpg_keys() + self.validate_journalist_alert_email() + self.validate_https_and_v3() + return True + + def validate_https_and_v3(self): + """ + Checks if https is enabled with v3 onion service. + + :returns: False if both v3 and https enabled, True otherwise. + """ + warning_msg = ("You have configured HTTPS on your source interface " + "and v3 onion services. " + "IMPORTANT: Ensure that you update your certificate " + "to include your v3 source URL before advertising " + "it to sources! ") + + if self.config.get("v3_onion_services", False) and \ + self.config.get("securedrop_app_https_certificate_cert_src"): + print(warning_msg) + return False + return True + + def check_for_v2_onion(self): + """ + Check if v2 onion services are already enabled or not. + """ + source_ths = os.path.join(self.args.ansible_path, "app-source-ths") + if os.path.exists(source_ths): # Means old installation + data = "" + with open(source_ths) as fobj: + data = fobj.read() + + data = data.strip() + if len(data) < 56: # Old v2 onion address + return True + return False + + def check_for_v3_onion(self): + """ + Check if v3 onion services should be enabled by default or not. 
+ """ + v2_value = self._config_in_progress.get("v2_onion_services", False) + # We need to see the value in the configuration file + # for v3_onion_services + v3_value = self.config.get("v3_onion_services", True) + return v3_value or not v2_value + + def user_prompt_config(self): + self._config_in_progress = {} + for desc in self.desc: + (var, default, type, prompt, validator, transform, + condition) = desc + if not condition(self._config_in_progress): + self._config_in_progress[var] = '' + continue + self._config_in_progress[var] = self.user_prompt_config_one(desc, + self.config.get(var)) # noqa: E501 + return self._config_in_progress + + def user_prompt_config_one(self, desc, from_config): + (var, default, type, prompt, validator, transform, condition) = desc + if from_config is not None and var != "v3_onion_services": + # v3_onion_services must be true if v2 is disabled by the admin + # otherwise, we may end up in a situation where both v2 and v3 + # are disabled by the admin (by mistake). + default = from_config + prompt += ': ' + + # The following is for the dynamic check of the user input + # for the previous question, as we are calling the default value + # function dynamically, we can get the right value based on the + # previous user input. + if callable(default): + default = default() + return self.validated_input(prompt, default, validator, transform) + + def validated_input(self, prompt, default, validator, transform): + if type(default) is bool: + default = default and 'yes' or 'no' + if type(default) is int: + default = str(default) + if isinstance(default, list): + default = " ".join(default) + if type(default) is not str: + default = str(default) + kwargs = {} + if validator: + kwargs['validator'] = validator + value = prompt_toolkit.prompt(prompt, + default=default, + **kwargs) + if transform: + return transform(value) + else: + return value + + def sanitize_fingerprint(self, value): + return value.upper().replace(' ', '') + + def validate_gpg_keys(self): + keys = (('securedrop_app_gpg_public_key', + 'securedrop_app_gpg_fingerprint'), + + ('ossec_alert_gpg_public_key', + 'ossec_gpg_fpr'), + + ('journalist_alert_gpg_public_key', + 'journalist_gpg_fpr')) + validate = os.path.join( + os.path.dirname(__file__), '..', 'bin', + 'validate-gpg-key.sh') + for (public_key, fingerprint) in keys: + if (self.config[public_key] == '' and + self.config[fingerprint] == ''): + continue + public_key = os.path.join(self.args.ansible_path, + self.config[public_key]) + fingerprint = self.config[fingerprint] + try: + sdlog.debug(subprocess.check_output( + [validate, public_key, fingerprint], + stderr=subprocess.STDOUT)) + except subprocess.CalledProcessError as e: + sdlog.debug(e.output) + raise FingerprintException( + "fingerprint {} ".format(fingerprint) + + "does not match " + + "the public key {}".format(public_key)) + return True + + def validate_journalist_alert_email(self): + if (self.config['journalist_alert_gpg_public_key'] == '' and + self.config['journalist_gpg_fpr'] == ''): + return True + + class Document(object): + def __init__(self, text): + self.text = text + + try: + SiteConfig.ValidateEmail().validate(Document( + self.config['journalist_alert_email'])) + except ValidationError as e: + raise JournalistAlertEmailException( + "journalist alerts email: " + e.message) + return True + + def exists(self): + return os.path.exists(self.args.site_config) + + def save(self): + with io.open(self.args.site_config, 'w') as site_config_file: + yaml.safe_dump(self.config, + site_config_file, 
+                   default_flow_style=False) + + def load(self): + try: + with io.open(self.args.site_config) as site_config_file: + return yaml.safe_load(site_config_file) + except IOError: + sdlog.error("Config file missing, re-run with sdconfig") + raise + except yaml.YAMLError: + sdlog.error("There was an issue processing {}".format( + self.args.site_config)) + raise + + +def setup_logger(verbose=False): + """ Configure logging handler """ + # Set default level on parent + sdlog.setLevel(logging.DEBUG) + level = logging.DEBUG if verbose else logging.INFO + + stdout = logging.StreamHandler(sys.stdout) + stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s')) + stdout.setLevel(level) + sdlog.addHandler(stdout) + + +def sdconfig(args): + """Configure SD site settings""" + SiteConfig(args).load_and_update_config() + return 0 + + +def generate_new_v3_keys(): + """This function generates new keys for Tor v3 onion + services and returns them as a tuple. + + :returns: Tuple(public_key, private_key) + """ + + private_key = x25519.X25519PrivateKey.generate() + private_bytes = private_key.private_bytes( + encoding=serialization.Encoding.Raw, + format=serialization.PrivateFormat.Raw, + encryption_algorithm=serialization.NoEncryption()) + public_key = private_key.public_key() + public_bytes = public_key.public_bytes( + encoding=serialization.Encoding.Raw, + format=serialization.PublicFormat.Raw) + + # Base32 encode and remove base32 padding characters (`=`) + # Using try/except blocks for Python 2/3 support. + try: + public = base64.b32encode(public_bytes).replace('=', '') \ + .decode("utf-8") + except TypeError: + public = base64.b32encode(public_bytes).replace(b'=', b'') \ + .decode("utf-8") + try: + private = base64.b32encode(private_bytes).replace('=', '') \ + .decode("utf-8") + except TypeError: + private = base64.b32encode(private_bytes).replace(b'=', b'') \ + .decode("utf-8") + return public, private + + +def find_or_generate_new_torv3_keys(args): + """ + This method will either read v3 Tor onion service keys if found, or generate + a new public/private keypair. 
+ """ + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + if os.path.exists(secret_key_path): + print('Tor v3 onion service keys already exist in: {}'.format( + secret_key_path)) + return 0 + # No old keys, generate and store them first + app_journalist_public_key, \ + app_journalist_private_key = generate_new_v3_keys() + # For app ssh service + app_ssh_public_key, app_ssh_private_key = generate_new_v3_keys() + # For mon ssh service + mon_ssh_public_key, mon_ssh_private_key = generate_new_v3_keys() + tor_v3_service_info = { + "app_journalist_public_key": app_journalist_public_key, + "app_journalist_private_key": app_journalist_private_key, + "app_ssh_public_key": app_ssh_public_key, + "app_ssh_private_key": app_ssh_private_key, + "mon_ssh_public_key": mon_ssh_public_key, + "mon_ssh_private_key": mon_ssh_private_key, + } + with open(secret_key_path, 'w') as fobj: + json.dump(tor_v3_service_info, fobj, indent=4) + print('Tor v3 onion service keys generated and stored in: {}'.format( + secret_key_path)) + return 0 + + +def install_securedrop(args): + """Install/Update SecureDrop""" + SiteConfig(args).load() + + sdlog.info("Now installing SecureDrop on remote servers.") + sdlog.info("You will be prompted for the sudo password on the " + "servers.") + sdlog.info("The sudo password is only necessary during initial " + "installation.") + return subprocess.check_call([os.path.join(args.ansible_path, + 'securedrop-prod.yml'), '--ask-become-pass'], + cwd=args.ansible_path) + + +def backup_securedrop(args): + """Perform backup of the SecureDrop Application Server. + Creates a tarball of submissions and server config, and fetches + back to the Admin Workstation. Future `restore` actions can be performed + with the backup tarball.""" + sdlog.info("Backing up the SecureDrop Application Server") + ansible_cmd = [ + 'ansible-playbook', + os.path.join(args.ansible_path, 'securedrop-backup.yml'), + ] + return subprocess.check_call(ansible_cmd, cwd=args.ansible_path) + + +def restore_securedrop(args): + """Perform restore of the SecureDrop Application Server. + Requires a tarball of submissions and server config, created via + the `backup` action.""" + sdlog.info("Restoring the SecureDrop Application Server from backup") + # Canonicalize filepath to backup tarball, so Ansible sees only the + # basename. The files must live in args.ansible_path, + # but the securedrop-admin + # script will be invoked from the repo root, so preceding dirs are likely. + restore_file_basename = os.path.basename(args.restore_file) + + # Would like readable output if there's a problem + os.environ["ANSIBLE_STDOUT_CALLBACK"] = "debug" + + ansible_cmd = [ + 'ansible-playbook', + os.path.join(args.ansible_path, 'securedrop-restore.yml'), + '-e', + "restore_file='{}'".format(restore_file_basename), + ] + return subprocess.check_call(ansible_cmd, cwd=args.ansible_path) + + +def run_tails_config(args): + """Configure Tails environment post SD install""" + sdlog.info("Configuring Tails workstation environment") + sdlog.info(("You'll be prompted for the temporary Tails admin password," + " which was set on Tails login screen")) + ansible_cmd = [ + os.path.join(args.ansible_path, 'securedrop-tails.yml'), + "--ask-become-pass", + # Passing an empty inventory file to override the automatic dynamic + # inventory script, which fails if no site vars are configured. 
+ '-i', '/dev/null', + ] + return subprocess.check_call(ansible_cmd, + cwd=args.ansible_path) + + +def check_for_updates_wrapper(args): + res, tag = check_for_updates(args) + # Because the command worked properly exit with 0. + return 0 + + +def check_for_updates(args): + """Check for SecureDrop updates""" + sdlog.info("Checking for SecureDrop updates...") + + # Determine what branch we are on + current_tag = subprocess.check_output(['git', 'describe'], + cwd=args.root).decode('utf-8').rstrip('\n') # noqa: E501 + + # Fetch all branches + git_fetch_cmd = ['git', 'fetch', '--all'] + subprocess.check_call(git_fetch_cmd, cwd=args.root) + + # Get latest tag + git_all_tags = ["git", "tag"] + all_tags = subprocess.check_output(git_all_tags, + cwd=args.root).decode('utf-8').rstrip('\n').split('\n') # noqa: E501 + + # Do not check out any release candidate tags + all_prod_tags = [x for x in all_tags if 'rc' not in x] + + # We want the tags to be sorted based on semver + all_prod_tags.sort(key=parse_version) + + latest_tag = all_prod_tags[-1] + + if current_tag != latest_tag: + sdlog.info("Update needed") + return True, latest_tag + sdlog.info("All updates applied") + return False, latest_tag + + +def get_release_key_from_keyserver(args, keyserver=None, timeout=45): + gpg_recv = ['timeout', str(timeout), 'gpg', '--batch', '--no-tty', + '--recv-key'] + release_key = [RELEASE_KEY] + + # We construct the gpg --recv-key command based on optional keyserver arg. + if keyserver: + get_key_cmd = gpg_recv + ['--keyserver', keyserver] + release_key + else: + get_key_cmd = gpg_recv + release_key + + subprocess.check_call(get_key_cmd, cwd=args.root) + + +def update(args): + """Verify, and apply latest SecureDrop workstation update""" + sdlog.info("Applying SecureDrop updates...") + + update_status, latest_tag = check_for_updates(args) + + if not update_status: + # Exit if we're up to date + return 0 + + sdlog.info("Verifying signature on latest update...") + + # Retrieve key from openpgp.org keyserver + get_release_key_from_keyserver(args, + keyserver=DEFAULT_KEYSERVER) + + git_verify_tag_cmd = ['git', 'tag', '-v', latest_tag] + try: + sig_result = subprocess.check_output(git_verify_tag_cmd, + stderr=subprocess.STDOUT, + cwd=args.root).decode('utf-8') + + good_sig_text = ['Good signature from "SecureDrop Release Signing ' + + 'Key"', + 'Good signature from "SecureDrop Release Signing ' + + 'Key <[email protected]>"'] + bad_sig_text = 'BAD signature' + gpg_lines = sig_result.split('\n') + + # Check if any strings in good_sig_text match against gpg_lines[] + good_sig_matches = [s for s in gpg_lines if + any(xs in s for xs in good_sig_text)] + + # To ensure that an adversary cannot name a malicious key good_sig_text + # we check that bad_sig_text does not appear, that the release key + # appears on the second line of the output, and that there is a single + # match from good_sig_text[] + if RELEASE_KEY in gpg_lines[1] and \ + len(good_sig_matches) == 1 and \ + bad_sig_text not in sig_result: + # Finally, we check that there is no branch of the same name + # prior to reporting success. 
+                cmd = ['git', 'show-ref', '--heads', '--verify', + 'refs/heads/{}'.format(latest_tag)] + try: + # We expect this to produce a non-zero exit code, which + # will produce a subprocess.CalledProcessError + subprocess.check_output(cmd, stderr=subprocess.STDOUT, + cwd=args.root) + sdlog.info("Signature verification failed.") + return 1 + except subprocess.CalledProcessError as e: + if 'not a valid ref' in e.output.decode('utf-8'): + # Then there is no duplicate branch. + sdlog.info("Signature verification successful.") + else: # If any other exception occurs, we bail. + sdlog.info("Signature verification failed.") + return 1 + else: # If anything else happens, fail and exit 1 + sdlog.info("Signature verification failed.") + return 1 + + except subprocess.CalledProcessError: + # If there is no signature, or if the signature does not verify, + # then git tag -v exits non-zero and subprocess.check_output + # will throw a CalledProcessError + sdlog.info("Signature verification failed.") + return 1 + + # Only if the proper signature verifies do we check out the latest + git_checkout_cmd = ['git', 'checkout', latest_tag] + subprocess.check_call(git_checkout_cmd, cwd=args.root) + + sdlog.info("Updated to SecureDrop {}.".format(latest_tag)) + return 0 + + +def get_logs(args): + """Get logs for forensics and debugging purposes""" + sdlog.info("Gathering logs for forensics and debugging") + ansible_cmd = [ + 'ansible-playbook', + os.path.join(args.ansible_path, 'securedrop-logs.yml'), + ] + subprocess.check_call(ansible_cmd, cwd=args.ansible_path) + sdlog.info("Please send the encrypted logs to [email protected] or " + "upload them to the SecureDrop support portal: " + SUPPORT_URL) + return 0 + + +def set_default_paths(args): + if not args.ansible_path: + args.ansible_path = args.root + "/install_files/ansible-base" + args.ansible_path = os.path.realpath(args.ansible_path) + if not args.site_config: + args.site_config = args.ansible_path + "/group_vars/all/site-specific" + args.site_config = os.path.realpath(args.site_config) + if not args.app_path: + args.app_path = args.root + "/securedrop" + args.app_path = os.path.realpath(args.app_path) + return args + + +def reset_admin_access(args): + """Resets SSH access to the SecureDrop servers, locking it to + this Admin Workstation.""" + sdlog.info("Resetting SSH access to the SecureDrop servers") + ansible_cmd = [ + 'ansible-playbook', + os.path.join(args.ansible_path, 'securedrop-reset-ssh-key.yml'), + ] + return subprocess.check_call(ansible_cmd, cwd=args.ansible_path) + + +def parse_argv(argv): + class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter, + argparse.RawTextHelpFormatter): + """Needed to combine formatting classes for help output""" + pass + + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=ArgParseFormatterCombo) + parser.add_argument('-v', action='store_true', default=False, + help="Increase verbosity on output") + parser.add_argument('-d', action='store_true', default=False, + help="Developer mode. 
Not to be used in production.") + parser.add_argument('--root', required=True, + help="path to the root of the SecureDrop repository") + parser.add_argument('--site-config', + help="path to the YAML site configuration file") + parser.add_argument('--ansible-path', + help="path to the Ansible root") + parser.add_argument('--app-path', + help="path to the SecureDrop application root") + subparsers = parser.add_subparsers() + + parse_sdconfig = subparsers.add_parser('sdconfig', help=sdconfig.__doc__) + parse_sdconfig.set_defaults(func=sdconfig) + + parse_install = subparsers.add_parser('install', + help=install_securedrop.__doc__) + parse_install.set_defaults(func=install_securedrop) + + parse_tailsconfig = subparsers.add_parser('tailsconfig', + help=run_tails_config.__doc__) + parse_tailsconfig.set_defaults(func=run_tails_config) + + parse_generate_tor_keys = subparsers.add_parser( + 'generate_v3_keys', + help=find_or_generate_new_torv3_keys.__doc__) + parse_generate_tor_keys.set_defaults(func=find_or_generate_new_torv3_keys) + + parse_backup = subparsers.add_parser('backup', + help=backup_securedrop.__doc__) + parse_backup.set_defaults(func=backup_securedrop) + + parse_restore = subparsers.add_parser('restore', + help=restore_securedrop.__doc__) + parse_restore.set_defaults(func=restore_securedrop) + parse_restore.add_argument("restore_file") + + parse_update = subparsers.add_parser('update', help=update.__doc__) + parse_update.set_defaults(func=update) + + parse_check_updates = subparsers.add_parser('check_for_updates', + help=check_for_updates.__doc__) + parse_check_updates.set_defaults(func=check_for_updates_wrapper) + + parse_logs = subparsers.add_parser('logs', + help=get_logs.__doc__) + parse_logs.set_defaults(func=get_logs) + + parse_reset_ssh = subparsers.add_parser('reset_admin_access', + help=reset_admin_access.__doc__) + parse_reset_ssh.set_defaults(func=reset_admin_access) + + args = parser.parse_args(argv) + if getattr(args, 'func', None) is None: + print('Please specify an operation.\n') + parser.print_help() + sys.exit(1) + return set_default_paths(args) + + +def main(argv): + args = parse_argv(argv) + setup_logger(args.v) + if args.v: + return_code = args.func(args) + if return_code != 0: + sys.exit(EXIT_SUBPROCESS_ERROR) + else: + try: + return_code = args.func(args) + except KeyboardInterrupt: + print('Process was interrupted.') + sys.exit(EXIT_INTERRUPT) + except subprocess.CalledProcessError as e: + print('ERROR (run with -v for more): {msg}'.format(msg=e), + file=sys.stderr) + sys.exit(EXIT_SUBPROCESS_ERROR) + except Exception as e: + raise SystemExit( + 'ERROR (run with -v for more): {msg}'.format(msg=e)) + if return_code == 0: + sys.exit(EXIT_SUCCESS) + else: + sys.exit(EXIT_SUBPROCESS_ERROR) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/admin/setup.py b/admin/setup.py new file mode 100644 --- /dev/null +++ b/admin/setup.py @@ -0,0 +1,22 @@ +# +# Copyright (C) 2013-2018 Freedom of the Press Foundation & al +# Copyright (C) 2018 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +import setuptools + +setuptools.setup( + setup_requires=['d2to1', 'pbr'], + d2to1=True) diff --git a/testinfra/combine-junit.py b/devops/scripts/combine-junit.py similarity index 78% rename from testinfra/combine-junit.py rename to devops/scripts/combine-junit.py --- a/testinfra/combine-junit.py +++ b/devops/scripts/combine-junit.py @@ -36,7 +36,11 @@ def merge_results(xml_files): cases = [] for file_name in xml_files: - tree = ET.parse(file_name) + # We disable bandit checking to permit B314, which recommends use + # of defusedxml to protect against malicious XML files. This code + # path only runs in CI, not on developer workstations, and the XML + # output is generated by testinfra on staging machines. + tree = ET.parse(file_name) # nosec test_suite = tree.getroot() failures += int(test_suite.attrib['failures']) tests += int(test_suite.attrib['tests']) @@ -57,7 +61,7 @@ def merge_results(xml_files): def usage(): this_file = os.path.basename(__file__) - print 'Usage: %s results1.xml results2.xml' % this_file + print('Usage: %s results1.xml results2.xml' % this_file) if __name__ == '__main__': diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -12,9 +12,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os -import shlex # Detect if we're being built by Read the Docs # https://docs.readthedocs.org/en/latest/faq.html#how-do-i-change-behavior-for-read-the-docs @@ -23,12 +21,12 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -44,14 +42,14 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'SecureDrop' -copyright = u'2017, Freedom of the Press Foundation' +copyright = u'2015-2019, Freedom of the Press Foundation' author = u'SecureDrop Team and Contributors' # The version info for the project you're documenting, acts as replacement for @@ -59,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '0.5.2' +version = '1.2.2' # The full version, including alpha/beta/rc tags. -release = '0.5.2' +release = '1.2.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -72,9 +70,9 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -82,27 +80,27 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -132,17 +130,17 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. @@ -151,7 +149,7 @@ # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -161,62 +159,62 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. 
-#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'SecureDropdoc' @@ -224,17 +222,17 @@ # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', -# Latex figure (float) alignment -#'figure_align': 'htbp', + # Latex figure (float) alignment + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -247,23 +245,23 @@ # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -276,7 +274,7 @@ ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -291,13 +289,24 @@ ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. 
-#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False + +# -- Options for linkcheck -- + +linkcheck_retries = 3 + +linkcheck_ignore = [ + r'http://127.0.0.1(:\d+)?/?', + r'http://localhost(:\d+)?/?', + 'https://forum.securedrop.org/admin/users/list/active', + 'https://weblate.securedrop.org/projects/securedrop/securedrop/#repository', +] diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py --- a/install_files/ansible-base/callback_plugins/ansible_version_check.py +++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py @@ -1,5 +1,6 @@ # -*- encoding:utf-8 -*- -from __future__ import absolute_import, division, print_function, unicode_literals +from __future__ import absolute_import, division, print_function, \ + unicode_literals import sys @@ -18,11 +19,13 @@ def print_red_bold(text): class CallbackModule(CallbackBase): def __init__(self): - # Can't use `on_X` because this isn't forwards compatible with Ansible 2.0+ - required_version = '2.3.2' # Keep synchronized with group_vars/all/main.yml + # Can't use `on_X` because this isn't forwards compatible + # with Ansible 2.0+ + required_version = '2.7.13' # Keep synchronized with requirements files if not ansible.__version__.startswith(required_version): print_red_bold( - "SecureDrop restriction: only Ansible {version}.* is supported. " + "SecureDrop restriction: only Ansible {version}.* " + "is supported." .format(version=required_version) ) sys.exit(1) diff --git a/install_files/ansible-base/callback_plugins/profile_tasks.py b/install_files/ansible-base/callback_plugins/profile_tasks.py deleted file mode 100644 --- a/install_files/ansible-base/callback_plugins/profile_tasks.py +++ /dev/null @@ -1,73 +0,0 @@ -# Source: https://github.com/jlafon/ansible-profile -# License: MIT -# More info: http://jlafon.io/ansible-profiling.html -# The profiling functionality will be provided by Ansible v2, -# since this callback_plugin has been merged into core, -# but we're including here to support older versions of Ansible. 
-import datetime -import os -import time - - -class CallbackModule(object): - """ - A plugin for timing tasks - """ - def __init__(self): - self.stats = {} - self.current = None - - def playbook_on_task_start(self, name, is_conditional): - """ - Logs the start of each task - """ - - if os.getenv("ANSIBLE_PROFILE_DISABLE") is not None: - return - - if self.current is not None: - # Record the running time of the last executed task - self.stats[self.current] = time.time() - self.stats[self.current] - - # Record the start time of the current task - self.current = name - self.stats[self.current] = time.time() - - def playbook_on_stats(self, stats): - """ - Prints the timings - """ - - if os.getenv("ANSIBLE_PROFILE_DISABLE") is not None: - return - - # Record the timing of the very last task - if self.current is not None: - self.stats[self.current] = time.time() - self.stats[self.current] - - # Sort the tasks by their running time - results = sorted( - self.stats.items(), - key=lambda value: value[1], - reverse=True, - ) - - # Just keep the top 10 - results = results[:10] - - # Print the timings - for name, elapsed in results: - print( - "{0:-<70}{1:->9}".format( - '{0} '.format(name), - ' {0:.02f}s'.format(elapsed), - ) - ) - - total_seconds = sum([x[1] for x in self.stats.items()]) - print("\nPlaybook finished: {0}, {1} total tasks. {2} elapsed. \n".format( - time.asctime(), - len(self.stats.items()), - datetime.timedelta(seconds=(int(total_seconds))) - ) - ) diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py --- a/install_files/ansible-base/roles/backup/files/0.3_collect.py +++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2.7 +#!/opt/venvs/securedrop-app-code/bin/python """ This script should be copied to the App server and ran by the anisble @@ -9,15 +9,13 @@ import sys import os -import re +import io import zipfile from datetime import datetime -import functools # Import the application config.py file sys.path.append("/var/www/securedrop") -import config -import gnupg -import subprocess +import config # noqa: F403 +import gnupg # noqa: F403 TOR_SERVICES = "/var/lib/tor/services" TOR_CONFIG = "/etc/tor/torrc" @@ -41,7 +39,7 @@ def collect_custom_header_image(zf): def collect_tor_files(zf): - # All of the tor hidden service private keys are stored in the THS specific + # All of the tor Onion Service private keys are stored in the THS specific # subdirectory `/var/lib/tor/services` backing up this directory will back # up all of the THS and ATHS required keys needed to restore all the hidden # services on that system. 
@@ -61,7 +59,7 @@ def encrypt_zip_file(zf_fn): gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR) e_fn = '{}.gpg'.format(zf_fn) - stream = open(zf_fn, "rb") + stream = io.open(zf_fn, "rb") gpg.encrypt_file(stream, config.JOURNALIST_KEY, always_trust='True', output=e_fn) @@ -76,7 +74,8 @@ def main(): collect_custom_header_image(zf) collect_tor_files(zf) encrypt_zip_file(zf_fn) - print zf_fn + print(zf_fn) + if __name__ == "__main__": main() diff --git a/install_files/ansible-base/roles/backup/files/backup.py b/install_files/ansible-base/roles/backup/files/backup.py --- a/install_files/ansible-base/roles/backup/files/backup.py +++ b/install_files/ansible-base/roles/backup/files/backup.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2.7 +#!/opt/venvs/securedrop-app-code/bin/python """ This script is copied to the App server and run by the Ansible playbook. When run (as root), it collects all of the necessary information to backup the 0.3 @@ -9,6 +9,7 @@ import os import tarfile + def main(): backup_filename = 'sd-backup-{}.tar.gz'.format( datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) @@ -30,7 +31,8 @@ def main(): backup.add(tor_hidden_services) backup.add(torrc) - print backup_filename + print(backup_filename) + if __name__ == "__main__": main() diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py --- a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py +++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py @@ -14,7 +14,7 @@ ossec_version: description: - version number of release to download - default: "2.8.2" + default: "3.0.0" required: no notes: - The OSSEC version to download is hardcoded to avoid surprises. 
@@ -23,96 +23,68 @@ ''' EXAMPLES = ''' - ossec_urls: - ossec_version: "2.8.2" + ossec_version: "3.0.0" ''' -from StringIO import StringIO -from urlparse import urljoin -import re +import re # noqa: F401 + HAS_REQUESTS = True try: - import requests + import requests # noqa: F401 except ImportError: HAS_REQUESTS = False - class OSSECURLs(): def __init__(self, ossec_version): + self.REPO_URL = "https://github.com/ossec/ossec-hids" self.ossec_version = ossec_version - - checksums = self.parse_checksums() - self.ansible_facts = dict( ossec_version=self.ossec_version, ossec_tarball_filename=self.ossec_tarball_filename, ossec_tarball_url=self.ossec_tarball_url, - ossec_checksum_filename=self.ossec_checksum_filename, - ossec_checksum_url=self.ossec_checksum_url, + ossec_signature_filename=self.ossec_signature_filename, + ossec_signature_url=self.ossec_signature_url, ) - self.ansible_facts.update(checksums) - @property def ossec_tarball_filename(self): return "ossec-hids-{}.tar.gz".format(self.ossec_version) - @property def ossec_tarball_url(self): - return "https://github.com/ossec/ossec-hids/archive/{}.tar.gz".format( - self.ossec_version) - + return self.REPO_URL + "/archive/{}.tar.gz".format(self.ossec_version) @property - def ossec_checksum_url(self): - return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format( - self.ossec_version, self.ossec_checksum_filename) - + def ossec_signature_url(self): + return self.REPO_URL + "/releases/download/{}/{}".format( + self.ossec_version, self.ossec_signature_filename) @property - def ossec_checksum_filename(self): - return "{}-checksum.txt".format(self.ossec_tarball_filename) - - - def parse_checksums(self): - r = requests.get(self.ossec_checksum_url) - checksum_regex = re.compile(r''' - ^MD5\( - ''' - +re.escape(self.ossec_tarball_filename)+ - r'''\)=\s+(?P<ossec_md5_checksum>[0-9a-f]{32})\s+ - SHA1\( - ''' - +re.escape(self.ossec_tarball_filename)+ - r'''\)=\s+(?P<ossec_sha1_checksum>[0-9a-f]{40})$ - ''', re.VERBOSE | re.MULTILINE - ) - checksum_list = r.content.rstrip() - results = re.match(checksum_regex, checksum_list).groupdict() - return results + def ossec_signature_filename(self): + return "ossec-hids-{}.tar.gz.asc".format(self.ossec_version) def main(): - module = AnsibleModule( + module = AnsibleModule( # noqa: F405 argument_spec=dict( - ossec_version=dict(default="2.8.2" ), + ossec_version=dict(default="3.0.0"), ), supports_check_mode=False ) if not HAS_REQUESTS: - module.fail_json(msg='requests required for this module') + module.fail_json(msg='requests required for this module') ossec_version = module.params['ossec_version'] try: ossec_config = OSSECURLs(ossec_version=ossec_version) - except: + except: # noqa: E722 msg = ("Failed to find checksum information for OSSEC v{}." 
- "Ensure you have the proper release specified, " - "and check the download page to confirm: " - "http://www.ossec.net/?page_id=19".format(ossec_version)) + "Ensure you have the proper release specified, " + "and check the download page to confirm: " + "http://www.ossec.net/?page_id=19".format(ossec_version)) module.fail_json(msg=msg) results = ossec_config.ansible_facts @@ -124,5 +96,5 @@ def main(): module.fail_json(msg=msg) -from ansible.module_utils.basic import * +from ansible.module_utils.basic import * # noqa E402,F403 main() diff --git a/install_files/ansible-base/roles/restore/files/0.3_restore.py b/install_files/ansible-base/roles/restore/files/0.3_restore.py deleted file mode 100755 --- a/install_files/ansible-base/roles/restore/files/0.3_restore.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/python2.7 -""" - -This script and decrypted backup zip should be copied to the App server -and run by the anisble plabook. When run (as root), it restores the 0.3 -backup file. - -python 0.3_restore.py sd-backup-TIMESTAMP.zip - -""" - -import sys -import os -import re -import zipfile -import subprocess -import shutil -from datetime import datetime -from operator import itemgetter -import calendar -import traceback - - -def replace_prefix(path, p1, p2): - """ - Replace p1 in path with p2 - - >>> replace_prefix("/tmp/files/foo.bar", "/tmp", "/home/me") - "home/me/files/foo.bar" - """ - common_prefix = os.path.commonprefix([path, p1]) - if common_prefix: - assert path.find(common_prefix) == 0 - # +1 so chop off the next path separator, which otherwise becomes a - # leading path separate and confuses os.path.join - path = path[len(common_prefix)+1:] - return os.path.join(p2, path) - - -def extract_to_path(archive, member, path, user): - """ - Extract from the zip archive `archive` the member `member` and write it to - `path`, preserving file metadata and chown'ing the file using `user` - """ - # Create all upper directories if necessary - upperdirs = os.path.dirname(path) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - with archive.open(member) as source, file(path, "wb") as target: - shutil.copyfileobj(source, target) - - # Update the timestamps as well (as best we can, thanks, conversion to - # localtime). This only actually works if the .zip was created on a - # machine where the timezone was set to UTC, but it might be good - # enough since we just need the relative order of timestamps (they will - # all be normalized anyway). - if hasattr(member, 'date_time'): - timestamp = calendar.timegm(member.date_time) - os.utime(path, (timestamp, timestamp)) - - ug = "{}:{}".format(user, user) - subprocess.call(['chown', '-R', ug, path]) - - -def restore_config_file(zf): - print "* Migrating SecureDrop config file from backup..." - - # Restore the original config file - for zi in zf.infolist(): - if "var/www/securedrop/config.py" in zi.filename: - extract_to_path(zf, "var/www/securedrop/config.py", - "/var/www/securedrop/config.py", "www-data") - - -def restore_securedrop_root(zf): - print "* Migrating directories from SECUREDROP_ROOT..." - - # Restore the original source directories and key files - for zi in zf.infolist(): - if "var/lib/securedrop/store" in zi.filename: - extract_to_path(zf, zi, - replace_prefix(zi.filename, - "var/lib/securedrop/store", - "/var/lib/securedrop/store"), - "www-data") - elif "var/lib/securedrop/keys" in zi.filename: - # TODO: is it a bad idea to migrate the random_seed from the - # previous installation? 
- extract_to_path(zf, zi, - replace_prefix(zi.filename, - "var/lib/securedrop/keys", - "/var/lib/securedrop/keys"), - "www-data") - - -def restore_database(zf): - print "* Migrating database..." - - extract_to_path(zf, "var/lib/securedrop/db.sqlite", - "/var/lib/securedrop/db.sqlite", "www-data") - - -def restore_custom_header_image(zf): - print "* Migrating custom header image..." - extract_to_path(zf, - "var/www/securedrop/static/i/logo.png", - "/var/www/securedrop/static/i/logo.png", "www-data") - - -def restore_tor_files(zf): - tor_root_dir = "/var/lib/tor" - ths_root_dir = os.path.join(tor_root_dir, "services") - source_ths_dir = os.path.join(ths_root_dir, "source") - journalist_ths_dir = os.path.join(ths_root_dir, "journalist") - - print "* Deleting previous source THS interface..." - - for fn in os.listdir(source_ths_dir): - os.remove(os.path.join(source_ths_dir, fn)) - - print "* Deleting previous journalist ATHS interface..." - - for fn in os.listdir(journalist_ths_dir): - os.remove(os.path.join(journalist_ths_dir, fn)) - - print "* Migrating source and journalist interface .onion..." - - for zi in zf.infolist(): - if "var/lib/tor/services/source" in zi.filename: - extract_to_path(zf, zi, - replace_prefix(zi.filename, - "var/lib/tor/services/source", - "/var/lib/tor/services/source"), - "debian-tor") - elif "var/lib/tor/services/journalist" in zi.filename: - extract_to_path(zf, zi, - replace_prefix(zi.filename, - "var/lib/tor/services/journalist", - "/var/lib/tor/services/journalist"), - "debian-tor") - - # Reload Tor to trigger registering the old Tor Hidden Services - # reloading Tor compared to restarting tor will not break the current tor - # connections for SSH - subprocess.call(['service', 'tor', 'reload']) - - -def main(): - if len(sys.argv) <= 1: - print ("Usage: 0.3_restore.py <filename>\n\n" - " <filename>\tPath to a SecureDrop 0.3 backup .zip file" - "created by 0.3_collect.py") - sys.exit(1) - - try: - zf_fn = sys.argv[1] - with zipfile.ZipFile(zf_fn, 'r') as zf: - restore_config_file(zf) - restore_securedrop_root(zf) - restore_database(zf) - restore_custom_header_image(zf) - restore_tor_files(zf) - except: - print "\n!!! Something went wrong, please file an issue.\n" - print traceback.format_exc() - else: - print "Done!" - -if __name__ == "__main__": - main() diff --git a/install_files/ansible-base/roles/restore/files/compare_torrc.py b/install_files/ansible-base/roles/restore/files/compare_torrc.py new file mode 100644 --- /dev/null +++ b/install_files/ansible-base/roles/restore/files/compare_torrc.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# +# Compares Tor configurations on the app server and from a backup. If +# restoring the backup would alter the server's Tor configuration, +# print a warning and exit. +# + +from __future__ import print_function + +import os +import re +import sys + + +def get_tor_versions(path): + """ + Determine which service versions are offered in the given torrc. + """ + service_re = re.compile(r"HiddenServiceDir\s+(?:.*)/(.*)") + versions = set([]) + with open(path) as f: + for line in f: + m = service_re.match(line) + if m: + service = m.group(1) + if "v3" in service: + versions.add(3) + else: + versions.add(2) + + return versions + + +def strset(s): + """ + Sort the given set and join members with "and". 
+ """ + return " and ".join(str(v) for v in sorted(s)) + + +if __name__ == "__main__": + tempdir = sys.argv[1] + + server_versions = get_tor_versions(os.path.join(tempdir, "app/etc/tor/torrc")) + backup_versions = get_tor_versions(os.path.join(tempdir, "backup/etc/tor/torrc")) + + if server_versions == backup_versions: + print("The Tor configuration in the backup matches the server.") + sys.exit(0) + + print( + "The Tor configuration on the app server offers version {} services.".format( + strset(server_versions) + ) + ) + + print( + "The Tor configuration in this backup offers version {} services.".format( + strset(backup_versions) + ) + ) + + print("\nRestoring a backup with a different Tor configuration than the server ") + print("is currently unsupported. If you require technical assistance, please ") + print("contact the SecureDrop team via the support portal or at ") + print("[email protected].") + + sys.exit(1) diff --git a/install_files/ansible-base/roles/restore/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py deleted file mode 100755 --- a/install_files/ansible-base/roles/restore/files/restore.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/python2.7 -""" -This script and backup archive should be copied to the App server and run by -the Ansible playbook. When run (as root), it restores the contents of the 0.3 -backup file to the machine it's run on. - -python restore.py sd-backup-TIMESTAMP.tar.gz -""" - -import os -import subprocess -import sys -import tarfile - - -def verify_args(): - usage = """ -Usage: restore.py <backup file> - - <backup file> Path to a SecureDrop 0.3 backup created by backup.py" - """ - if len(sys.argv) != 2: - print(usage) - sys.exit(1) - - if not os.path.exists(sys.argv[1]): - print("<backup file> '{}' not found".format(sys.argv(1))) - sys.exit(1) - - if os.geteuid() != 0: - print("This program must be run as root!") - sys.exit(1) - - -def main(): - verify_args() - - with tarfile.open(sys.argv[1], 'r:*') as backup: - # This assumes that both the old installation (source of the backup) - # and the new installation (destination of the restore) used the - # default paths for various locations. - backup.extractall(path='/') - - # Reload Tor and the web server so they pick up the new configuration - # If the process exits with a non-zero return code, raises an exception. 
- subprocess.check_call(['service', 'apache2', 'restart']) - subprocess.check_call(['service', 'tor', 'reload']) - -if __name__ == "__main__": - main() diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py --- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py +++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py @@ -1,11 +1,14 @@ -#!/usr/bin/python +#!/usr/bin/python3 import grp import os +import io import pwd import sys import subprocess +from shutil import copyfile + # check for root if os.geteuid() != 0: @@ -16,34 +19,80 @@ path_torrc_backup = '/etc/tor/torrc.bak' path_torrc = '/etc/tor/torrc' path_desktop = '/home/amnesia/Desktop/' -path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/' +path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/' # noqa: E501 +path_securedrop_root = '/home/amnesia/Persistent/securedrop' +path_securedrop_admin_venv = os.path.join(path_securedrop_root, + 'admin/.venv3/bin/python') +path_securedrop_admin_init = os.path.join(path_securedrop_root, + 'admin/securedrop_admin/__init__.py') +path_gui_updater = os.path.join(path_securedrop_root, + 'journalist_gui/SecureDropUpdater') + +paths_v3_authfiles = { + "app-journalist": os.path.join(path_securedrop_root, + 'install_files/ansible-base/app-journalist.auth_private'), + "app-ssh": os.path.join(path_securedrop_root, + 'install_files/ansible-base/app-ssh.auth_private'), + "mon-ssh": os.path.join(path_securedrop_root, + 'install_files/ansible-base/mon-ssh.auth_private') +} +path_onion_auth_dir = '/var/lib/tor/onion_auth' # load torrc_additions if os.path.isfile(path_torrc_additions): - torrc_additions = open(path_torrc_additions).read() + with io.open(path_torrc_additions) as f: + torrc_additions = f.read() else: sys.exit('Error opening {0} for reading'.format(path_torrc_additions)) # load torrc if os.path.isfile(path_torrc_backup): - torrc = open(path_torrc_backup).read() + with io.open(path_torrc_backup) as f: + torrc = f.read() else: if os.path.isfile(path_torrc): - torrc = open(path_torrc).read() + with io.open(path_torrc) as f: + torrc = f.read() else: sys.exit('Error opening {0} for reading'.format(path_torrc)) # save a backup - open(path_torrc_backup, 'w').write(torrc) + with io.open(path_torrc_backup, 'w') as f: + f.write(torrc) # append the additions -open(path_torrc, 'w').write(torrc + torrc_additions) +with io.open(path_torrc, 'w') as f: + f.write(torrc + torrc_additions) + +# check for v3 aths files +v3_authfiles_present = False +for f in paths_v3_authfiles.values(): + if os.path.isfile(f): + v3_authfiles_present = True + +# if there are v3 authfiles, make dir and copy them into place +debian_tor_uid = pwd.getpwnam("debian-tor").pw_uid +debian_tor_gid = grp.getgrnam("debian-tor").gr_gid + +if not os.path.isdir(path_onion_auth_dir): + os.mkdir(path_onion_auth_dir) + +os.chmod(path_onion_auth_dir, 0o700) +os.chown(path_onion_auth_dir, debian_tor_uid, debian_tor_gid) -# reload tor +for key, f in paths_v3_authfiles.items(): + if os.path.isfile(f): + filename = os.path.basename(f) + new_f = os.path.join(path_onion_auth_dir, filename) + copyfile(f, new_f) + os.chmod(new_f, 0o400) + os.chown(new_f, debian_tor_uid, debian_tor_gid) + +# restart tor try: - subprocess.check_call(['systemctl', 'reload', '[email protected]']) + subprocess.check_call(['systemctl', 'restart', '[email protected]']) 
except subprocess.CalledProcessError:
-    sys.exit('Error reloading Tor')
+    sys.exit('Error restarting Tor')

# Turn off "automatic-decompression" in Nautilus to ensure the original
# submission filename is restored (see
@@ -60,21 +109,49 @@
os.setresgid(amnesia_gid, amnesia_gid, -1)
os.setresuid(amnesia_uid, amnesia_uid, -1)
env = os.environ.copy()
+env['XDG_CURRENT_DESKTOP'] = 'GNOME'
+env['DESKTOP_SESSION'] = 'default'
+env['DISPLAY'] = ':1'
env['XDG_RUNTIME_DIR'] = '/run/user/{}'.format(amnesia_uid)
env['XDG_DATA_DIR'] = '/usr/share/gnome:/usr/local/share/:/usr/share/'
env['HOME'] = '/home/amnesia'
env['LOGNAME'] = 'amnesia'
-env['DBUS_SESSION_BUS_ADDRESS'] = 'unix:path=/run/user/{}/bus'.format(amnesia_uid)
+env['DBUS_SESSION_BUS_ADDRESS'] = 'unix:path=/run/user/{}/bus'.format(
+    amnesia_uid)

-# remove existing shortcut, recreate symlink and change metadata attribute to trust .desktop
+# remove existing shortcut, recreate symlink and change metadata attribute
+# to trust .desktop
for shortcut in ['source.desktop', 'journalist.desktop']:
    subprocess.call(['rm', path_desktop + shortcut], env=env)
-    subprocess.call(['ln', '-s', path_persistent_desktop + shortcut, path_desktop + shortcut], env=env)
-    subprocess.call(['gio', 'set', path_desktop + shortcut, 'metadata::trusted', 'yes'], env=env)
+    subprocess.call(['ln', '-s', path_persistent_desktop + shortcut,
+                     path_desktop + shortcut], env=env)
+    subprocess.call(['gio', 'set', path_desktop + shortcut,
+                     'metadata::trusted', 'true'], env=env)
+
+# in Tails 4, reload gnome-shell desktop icons extension to update with changes above
+cmd = ["lsb_release", "--id", "--short"]
+p = subprocess.check_output(cmd)
+distro_id = p.decode('utf-8').rstrip()
+if distro_id == 'Debian' and os.uname()[1] == 'amnesia':
+    subprocess.call(['gnome-shell-extension-tool', '-r', 'desktop-icons@csoriano'], env=env)

# reacquire uid0 and notify the user
-os.setresuid(0,0,-1)
-os.setresgid(0,0,-1)
+os.setresuid(0, 0, -1)
+os.setresgid(0, 0, -1)
+success_message = 'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.'  # noqa: E501
subprocess.call(['tails-notify-user',
                 'SecureDrop successfully auto-configured!',
-                 'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.'])
+                 success_message])
+
+# As the amnesia user, check for SecureDrop workstation updates.
+os.setresgid(amnesia_gid, amnesia_gid, -1)
+os.setresuid(amnesia_uid, amnesia_uid, -1)
+output = subprocess.check_output([path_securedrop_admin_venv,
+                                  path_securedrop_admin_init,
+                                  '--root', path_securedrop_root,
+                                  'check_for_updates'], env=env)
+
+flag_location = "/home/amnesia/Persistent/.securedrop/securedrop_update.flag"
+if b'Update needed' in output or os.path.exists(flag_location):
+    # Start the SecureDrop updater GUI.
+ subprocess.Popen(['python3', path_gui_updater], env=env) diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py new file mode 100644 --- /dev/null +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -0,0 +1,325 @@ +#!/usr/bin/python +from PyQt5 import QtGui, QtWidgets +from PyQt5.QtCore import QThread, pyqtSignal +import subprocess +import os +import re +import pexpect +import socket +import sys + +from journalist_gui import updaterUI, strings, resources_rc # noqa + + +FLAG_LOCATION = "/home/amnesia/Persistent/.securedrop/securedrop_update.flag" # noqa +ESCAPE_POD = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') + + +def password_is_set(): + + pwd_flag = subprocess.check_output(['passwd', '--status']).decode('utf-8').split()[1] + + if pwd_flag == 'NP': + return False + return True + + +def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # noqa + + # Null byte triggers abstract namespace + IDENTIFIER = '\0' + name + ALREADY_BOUND_ERRNO = 98 + + app.instance_binding = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + try: + app.instance_binding.bind(IDENTIFIER) + except OSError as e: + if e.errno == ALREADY_BOUND_ERRNO: + err_dialog = QtWidgets.QMessageBox() + err_dialog.setText(name + strings.app_is_already_running) + err_dialog.exec() + sys.exit() + else: + raise + + +class SetupThread(QThread): + signal = pyqtSignal('PyQt_PyObject') + + def __init__(self): + QThread.__init__(self) + self.output = "" + self.update_success = False + self.failure_reason = "" + + def run(self): + sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin' + update_command = [sdadmin_path, 'setup'] + + # Create flag file to indicate we should resume failed updates on + # reboot. Don't create the flag if it already exists. 
+ if not os.path.exists(FLAG_LOCATION): + open(FLAG_LOCATION, 'a').close() + + try: + self.output = subprocess.check_output( + update_command, + stderr=subprocess.STDOUT).decode('utf-8') + if 'Failed to install' in self.output: + self.update_success = False + self.failure_reason = strings.update_failed_generic_reason + else: + self.update_success = True + except subprocess.CalledProcessError as e: + self.output += e.output.decode('utf-8') + self.update_success = False + self.failure_reason = strings.update_failed_generic_reason + result = {'status': self.update_success, + 'output': self.output, + 'failure_reason': self.failure_reason} + self.signal.emit(result) + + +# This thread will handle the ./securedrop-admin update command +class UpdateThread(QThread): + signal = pyqtSignal('PyQt_PyObject') + + def __init__(self): + QThread.__init__(self) + self.output = "" + self.update_success = False + self.failure_reason = "" + + def run(self): + sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin' + update_command = [sdadmin_path, 'update'] + try: + self.output = subprocess.check_output( + update_command, + stderr=subprocess.STDOUT).decode('utf-8') + if "Signature verification successful" in self.output: + self.update_success = True + else: + self.failure_reason = strings.update_failed_generic_reason + except subprocess.CalledProcessError as e: + self.update_success = False + self.output += e.output.decode('utf-8') + if 'Signature verification failed' in self.output: + self.failure_reason = strings.update_failed_sig_failure + else: + self.failure_reason = strings.update_failed_generic_reason + result = {'status': self.update_success, + 'output': self.output, + 'failure_reason': self.failure_reason} + self.signal.emit(result) + + +# This thread will handle the ./securedrop-admin tailsconfig command +class TailsconfigThread(QThread): + signal = pyqtSignal('PyQt_PyObject') + + def __init__(self): + QThread.__init__(self) + self.output = "" + self.update_success = False + self.failure_reason = "" + self.sudo_password = "" + + def run(self): + tailsconfig_command = ("/home/amnesia/Persistent/" + "securedrop/securedrop-admin " + "tailsconfig") + try: + child = pexpect.spawn(tailsconfig_command) + child.expect('SUDO password:') + self.output += child.before.decode('utf-8') + child.sendline(self.sudo_password) + child.expect(pexpect.EOF) + self.output += child.before.decode('utf-8') + child.close() + + # For Tailsconfig to be considered a success, we expect no + # failures in the Ansible output. 
+            if child.exitstatus:
+                self.update_success = False
+                self.failure_reason = strings.tailsconfig_failed_generic_reason  # noqa
+            else:
+                self.update_success = True
+        except pexpect.exceptions.TIMEOUT:
+            self.update_success = False
+            self.failure_reason = strings.tailsconfig_failed_sudo_password
+
+        except subprocess.CalledProcessError:
+            self.update_success = False
+            self.failure_reason = strings.tailsconfig_failed_generic_reason
+        result = {'status': self.update_success,
+                  'output': ESCAPE_POD.sub('', self.output),
+                  'failure_reason': self.failure_reason}
+        self.signal.emit(result)
+
+
+class UpdaterApp(QtWidgets.QMainWindow, updaterUI.Ui_MainWindow):
+
+    def __init__(self, parent=None):
+        super(UpdaterApp, self).__init__(parent)
+        self.setupUi(self)
+        self.statusbar.setSizeGripEnabled(False)
+        self.output = strings.initial_text_box
+        self.plainTextEdit.setPlainText(self.output)
+        self.update_success = False
+
+        pixmap = QtGui.QPixmap(":/images/static/banner.png")
+        self.label_2.setPixmap(pixmap)
+        self.label_2.setScaledContents(True)
+
+        self.progressBar.setProperty("value", 0)
+        self.setWindowTitle(strings.window_title)
+        self.setWindowIcon(QtGui.QIcon(':/images/static/securedrop_icon.png'))
+        self.label.setText(strings.update_in_progress)
+
+        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab),
+                                  strings.main_tab)
+        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2),
+                                  strings.output_tab)
+
+        # Connect buttons to their functions.
+        self.pushButton.setText(strings.install_later_button)
+        self.pushButton.setStyleSheet("""background-color: lightgrey;
+                                      min-height: 2em;
+                                      border-radius: 10px""")
+        self.pushButton.clicked.connect(self.close)
+        self.pushButton_2.setText(strings.install_update_button)
+        self.pushButton_2.setStyleSheet("""background-color: #E6FFEB;
+                                        min-height: 2em;
+                                        border-radius: 10px;""")
+        self.pushButton_2.clicked.connect(self.update_securedrop)
+        self.update_thread = UpdateThread()
+        self.update_thread.signal.connect(self.update_status)
+        self.tails_thread = TailsconfigThread()
+        self.tails_thread.signal.connect(self.tails_status)
+        self.setup_thread = SetupThread()
+        self.setup_thread.signal.connect(self.setup_status)
+
+    # At the end of this function, we will try to do tailsconfig.
+    # A new slot will handle tailsconfig output
+    def setup_status(self, result):
+        "This is the slot for setup thread"
+        self.output += result['output']
+        self.update_success = result['status']
+        self.failure_reason = result['failure_reason']
+        self.progressBar.setProperty("value", 60)
+        self.plainTextEdit.setPlainText(self.output)
+        self.plainTextEdit.setReadOnly(True)
+        if not self.update_success:  # Failed to do setup
+            self.pushButton.setEnabled(True)
+            self.pushButton_2.setEnabled(True)
+            self.update_status_bar_and_output(self.failure_reason)
+            self.progressBar.setProperty("value", 0)
+            self.alert_failure(self.failure_reason)
+            return
+        self.progressBar.setProperty("value", 70)
+        self.call_tailsconfig()
+
+    # This will update the output text after the git commands.
+    def update_status(self, result):
+        "This is the slot for update thread"
+        self.output += result['output']
+        self.update_success = result['status']
+        self.failure_reason = result['failure_reason']
+        self.progressBar.setProperty("value", 40)
+        self.plainTextEdit.setPlainText(self.output)
+        self.plainTextEdit.setReadOnly(True)
+        if not self.update_success:  # Failed to do update
+            self.pushButton.setEnabled(True)
+            self.pushButton_2.setEnabled(True)
+            self.update_status_bar_and_output(self.failure_reason)
+            self.progressBar.setProperty("value", 0)
+            self.alert_failure(self.failure_reason)
+            return
+        self.progressBar.setProperty("value", 50)
+        self.update_status_bar_and_output(strings.doing_setup)
+        self.setup_thread.start()
+
+    def update_status_bar_and_output(self, status_message):
+        """This method updates the status bar and the output window with the
+        status_message."""
+        self.statusbar.showMessage(status_message)
+        self.output += status_message + '\n'
+        self.plainTextEdit.setPlainText(self.output)
+
+    def call_tailsconfig(self):
+        # Now let us work on tailsconfig part
+        if self.update_success:
+            # Get sudo password and add an enter key as tailsconfig command
+            # expects
+            sudo_password = self.get_sudo_password()
+            if not sudo_password:
+                self.update_success = False
+                self.failure_reason = strings.missing_sudo_password
+                self.on_failure()
+                return
+            self.tails_thread.sudo_password = sudo_password + '\n'
+            self.update_status_bar_and_output(strings.updating_tails_env)
+            self.tails_thread.start()
+        else:
+            self.on_failure()
+
+    def tails_status(self, result):
+        "This is the slot for Tailsconfig thread"
+        self.output += result['output']
+        self.update_success = result['status']
+        self.failure_reason = result['failure_reason']
+        self.plainTextEdit.setPlainText(self.output)
+        self.progressBar.setProperty("value", 80)
+        if self.update_success:
+            # Remove flag file indicating an update is in progress
+            os.remove(FLAG_LOCATION)
+            self.update_status_bar_and_output(strings.finished)
+            self.progressBar.setProperty("value", 100)
+            self.alert_success()
+        else:
+            self.on_failure()
+
+    def on_failure(self):
+        self.update_status_bar_and_output(self.failure_reason)
+        self.alert_failure(self.failure_reason)
+        # Now everything is done, enable the button.
+ self.pushButton.setEnabled(True) + self.pushButton_2.setEnabled(True) + self.progressBar.setProperty("value", 0) + + def update_securedrop(self): + if password_is_set(): + self.pushButton_2.setEnabled(False) + self.pushButton.setEnabled(False) + self.progressBar.setProperty("value", 10) + self.update_status_bar_and_output(strings.fetching_update) + self.update_thread.start() + else: + self.pushButton_2.setEnabled(False) + pwd_err_dialog = QtWidgets.QMessageBox() + pwd_err_dialog.setText(strings.no_password_set_message) + pwd_err_dialog.exec() + + def alert_success(self): + self.success_dialog = QtWidgets.QMessageBox() + self.success_dialog.setIcon(QtWidgets.QMessageBox.Information) + self.success_dialog.setText(strings.finished_dialog_message) + self.success_dialog.setWindowTitle(strings.finished_dialog_title) + self.success_dialog.show() + + def alert_failure(self, failure_reason): + self.error_dialog = QtWidgets.QMessageBox() + self.error_dialog.setIcon(QtWidgets.QMessageBox.Critical) + self.error_dialog.setText(self.failure_reason) + self.error_dialog.setWindowTitle(strings.update_failed_dialog_title) + self.error_dialog.show() + + def get_sudo_password(self): + sudo_password, ok_is_pressed = QtWidgets.QInputDialog.getText( + self, "Tails Administrator password", strings.sudo_password_text, + QtWidgets.QLineEdit.Password, "") + if ok_is_pressed and sudo_password: + return sudo_password + else: + return None diff --git a/journalist_gui/journalist_gui/__init__.py b/journalist_gui/journalist_gui/__init__.py new file mode 100644 diff --git a/journalist_gui/journalist_gui/resources_rc.py b/journalist_gui/journalist_gui/resources_rc.py new file mode 100644 --- /dev/null +++ b/journalist_gui/journalist_gui/resources_rc.py @@ -0,0 +1,1030 @@ +# -*- coding: utf-8 -*- + +# Resource object code +# +# Created by: The Resource Compiler for PyQt5 (Qt v5.9.5) +# +# WARNING! All changes made in this file will be lost! 
+
+from PyQt5 import QtCore
+
+qt_resource_data = b"\
+\x00\x00\x14\x41\
+\x89\
+\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
[... roughly 1,000 further lines of pyrcc5-generated hex escapes elided: the byte payload is the binary content of the embedded PNG images (":/images/static/securedrop_icon.png" and ":/images/static/banner.png") referenced by the updater GUI ...]
+\x0c\x39\x3d\x2e\x8e\xc6\xfc\x15\x67\x4a\xf6\x1f\xba\x10\x9b\x64\ +\x60\x82\x06\x48\x08\xe6\x12\x6b\xec\x74\x5e\x50\x92\xd6\x77\x90\ +\x59\x78\x51\xb6\x49\x6e\x92\xb2\x6e\xd9\x2e\xa9\x30\x4a\x94\xa7\ +\x68\x53\x01\x60\x7e\x48\x03\x57\x4c\x39\x53\xa9\xc8\x1e\xc3\xd3\ +\xef\x57\x6a\x93\x92\xba\xd5\x10\xce\xb4\x8d\x76\xe1\x78\xb5\xa5\ +\xb7\x3a\x91\x69\xf0\x50\x94\xa0\x11\x71\x93\xc7\xd9\x9a\xa0\x76\ +\xb8\x62\x73\xc2\xb0\x86\x3b\x66\xe2\x84\x3b\x91\xe6\x9b\xc4\x37\ +\xa3\x63\x18\x02\x8c\xec\x86\xaa\xd4\xe1\x82\x96\x74\xca\xff\x2c\ +\x91\x23\xff\x99\x26\x51\x2b\x89\xe1\xca\x3d\xa0\x45\x1d\x4d\xa3\ +\xe6\x2d\x3c\xf6\xc2\x7d\xa9\xe2\x5d\x4f\x55\x8c\x5d\xaf\x64\xd6\ +\x62\x7d\xab\x12\x29\x2e\x4b\xf8\xa7\x69\xad\x2f\x4e\x03\xe2\x4c\ +\x8f\xbd\xf2\x0d\x86\x86\x11\xf1\x03\x4c\xd1\xc6\x78\x1c\x4f\xc6\ +\x4c\x8a\x97\x0a\x72\xe8\xe3\x7d\x7e\x12\x70\x77\x33\x69\x73\x2c\ +\xae\x41\x2b\x81\x33\x76\x77\xc0\x4d\x74\xa8\x18\x03\xf6\x10\x58\ +\x3a\x74\x80\x9d\x9d\xee\x10\xae\xd0\xba\x75\x1a\x13\x70\xa4\x54\ +\x74\xfa\x3a\x16\x71\x73\x82\x77\xd5\xb0\xf6\xd8\xa7\x66\x24\x1c\ +\xde\x15\xe1\x1b\x0d\x6b\x0f\x6a\xd8\x80\x30\x36\xee\xb9\x78\x1a\ +\xb6\x4b\xea\x4c\x77\x09\xae\x14\x87\x2e\xaa\xf5\x84\x91\xb0\x46\ +\x59\x48\x6e\xe7\xae\x0b\x79\xc2\x46\xa2\xe5\x6c\x8b\x5e\xeb\xb5\ +\x2d\xdc\x47\x46\xd8\x32\x1e\xcc\xc3\x4c\x65\xc1\x9a\xb9\x67\xa3\ +\xfa\x90\xb7\xf9\x52\x1d\xd0\x91\x42\x0b\xe9\x2c\x3c\x9e\xba\xab\ +\xb8\x65\xc3\x01\x53\x06\x1a\xfe\x18\xae\x36\x6d\x23\x9f\x00\x68\ +\x9d\x98\x91\x0a\x85\x5d\xb7\x4a\x5f\x59\x1c\x7a\x7c\x85\xeb\x21\ +\x1a\x4d\x2d\x00\x85\x7b\xdc\x8d\xe4\xd1\x12\x25\x21\x2e\x6a\x28\ +\x28\x51\x66\x6c\x08\x0e\x34\xcd\xb0\x0f\xad\x1b\x3d\x33\xd6\x97\ +\xc5\x8f\x48\x29\x00\x25\xba\x28\xa2\xbe\x60\xf5\xc2\x0f\x61\x4d\ +\x11\x52\x8a\x8e\xd0\x1d\x4f\x02\x32\xe9\x8b\x05\xfe\xdd\xe7\x27\ +\xc1\xb9\xb1\xe4\x10\x92\x3a\xb2\xd0\x6c\xd2\x82\xc2\x45\x73\x47\ +\x85\xf1\x21\xa9\x8f\x5e\x5e\x69\x3b\x6c\xe3\x91\xd1\x75\x92\xf7\ +\x38\x61\x2f\x88\x20\x4a\x58\xac\xce\x71\xa6\x9a\x1d\xca\x11\x3c\ +\xbd\x07\x87\xb6\xf8\xa3\x09\x41\xd7\xd7\xf3\x21\xfe\xfd\x3b\x5a\ +\x94\x1a\x35\x78\xd0\xae\xdc\x70\x97\x07\x10\xca\x41\xf5\x4f\xdf\ +\x78\x1d\x79\x7f\x05\xbf\xb0\x82\x18\x74\xa2\x7f\x74\xbb\xea\x55\ +\x4e\x3a\xa5\x23\x5a\x85\xdc\xb3\x22\x8a\x49\xe7\x42\xd4\x49\xbc\ +\x29\x73\x05\x99\x0b\x21\x3d\xe2\x32\x8b\x33\x8f\x34\xe3\xe2\x1d\ +\xfa\xa0\x43\x91\x32\xf7\xa6\x75\x3a\x7d\xb4\x8e\xb6\x61\x0d\x49\ +\x1f\xb8\xd2\x03\xf9\xac\xf4\xb9\x48\x49\xf4\x7e\xa5\x13\xe5\x7e\ +\xa7\x09\xb9\xc9\x2a\x11\x28\xff\x45\x93\xdc\x87\xc2\x91\xf5\x1d\ +\x1f\x72\xad\xc6\x7a\xdc\xae\xd0\xb3\x0c\xfc\xce\xce\x09\x1e\x6d\ +\xa1\x07\xb9\x0c\x76\xad\x3e\x57\x92\x18\x94\xdc\x6b\xd4\x6d\x3a\ +\xba\xeb\xfa\x83\x35\xde\x13\xf6\xd1\x6c\x61\xa2\xf4\xa0\xa0\x39\ +\x51\x04\x48\x0a\xf1\x0e\xec\x92\xb4\x68\x2d\x44\x1d\x9a\x2a\xf5\ +\x06\x77\xfd\x65\x14\xff\x8d\xbd\xfc\x3c\x19\x63\x6f\x12\xed\x63\ +\x98\xdc\xc2\xc7\x30\x3f\x4f\xd6\x18\x1a\xdb\xfd\x1f\xae\x38\x64\ +\x7a\xa3\xbf\x58\xf1\x97\x05\xd3\x93\xc2\xef\x91\x5b\x53\xdc\x4d\ +\x76\xcb\x81\x92\x3f\x32\x92\x20\xba\xe2\xa4\x98\x60\xff\xb0\xee\ +\x44\xeb\xf0\xf9\x2c\xed\x04\x95\x4a\x67\x23\x5e\x75\x76\x9a\xf8\ +\xb2\xde\x5b\x67\x41\x31\xd0\xd2\xfc\x41\x54\xce\x6f\x77\x1a\x3e\ +\x0f\xe0\xa4\xfd\xa3\xbd\x7e\x03\x27\xfc\x29\x3a\xbf\x03\x27\xfc\ +\x1e\x9d\x4d\xfe\x56\x94\x1c\x79\xa0\x9f\x44\xb6\x65\x59\x44\xf8\ +\xd1\x27\xba\xca\xd1\xd5\xfc\x07\x29\x6a\x60\xf4\x8a\x1b\x94\xf2\ +\x1e\xfe\x0d\x9f\x34\xd7\x4b\xab\x17\x05\xf1\x00\x00\x01\x85\x69\ +\x43\x43\x50\x49\x43\x43\x20\x70\x72\x6f\x66\x69\x6c\x65\x00\x00\ 
+\x28\x91\x7d\x91\x3d\x48\xc3\x40\x1c\xc5\x5f\x53\xc5\x0f\x5a\x04\ +\xad\x20\x22\x92\xa1\x3a\x59\x10\x15\x71\xd4\x2a\x14\xa1\x42\xa8\ +\x15\x5a\x75\x30\xb9\xf4\x0b\x9a\x34\x24\x29\x2e\x8e\x82\x6b\xc1\ +\xc1\x8f\xc5\xaa\x83\x8b\xb3\xae\x0e\xae\x82\x20\xf8\x01\xe2\xe2\ +\xea\xa4\xe8\x22\x25\xfe\xaf\x29\xb4\x88\xf1\xe0\xb8\x1f\xef\xee\ +\x3d\xee\xde\x01\x42\xb5\xc8\x34\xab\x6d\x1c\xd0\x74\xdb\x4c\xc4\ +\xa2\x62\x2a\xbd\x2a\x76\xbc\xa2\x0b\x41\xf4\x62\x18\xfd\x32\xb3\ +\x8c\x39\x49\x8a\xc3\x73\x7c\xdd\xc3\xc7\xd7\xbb\x08\xcf\xf2\x3e\ +\xf7\xe7\x08\xaa\x19\x8b\x01\x3e\x91\x78\x96\x19\xa6\x4d\xbc\x41\ +\x3c\xbd\x69\x1b\x9c\xf7\x89\x43\x2c\x2f\xab\xc4\xe7\xc4\x63\x26\ +\x5d\x90\xf8\x91\xeb\x8a\xcb\x6f\x9c\x73\x75\x16\x78\x66\xc8\x4c\ +\x26\xe6\x89\x43\xc4\x62\xae\x85\x95\x16\x66\x79\x53\x23\x9e\x22\ +\x0e\xab\x9a\x4e\xf9\x42\xca\x65\x95\xf3\x16\x67\xad\x58\x66\x8d\ +\x7b\xf2\x17\x06\x32\xfa\xca\x32\xd7\x69\x0e\x21\x86\x45\x2c\x41\ +\x82\x08\x05\x65\x14\x50\x84\x8d\x08\xad\x3a\x29\x16\x12\xb4\x1f\ +\xf5\xf0\x0f\xd6\xfd\x12\xb9\x14\x72\x15\xc0\xc8\xb1\x80\x12\x34\ +\xc8\x75\x3f\xf8\x1f\xfc\xee\xd6\xca\x4e\x4e\xb8\x49\x81\x28\xd0\ +\xfe\xe2\x38\x1f\x23\x40\xc7\x2e\x50\xab\x38\xce\xf7\xb1\xe3\xd4\ +\x4e\x00\xff\x33\x70\xa5\x37\xfd\xa5\x2a\x30\xf3\x49\x7a\xa5\xa9\ +\x85\x8f\x80\x9e\x6d\xe0\xe2\xba\xa9\x29\x7b\xc0\xe5\x0e\x30\xf0\ +\x64\xc8\xa6\x5c\x97\xfc\x34\x85\x6c\x16\x78\x3f\xa3\x6f\x4a\x03\ +\x7d\xb7\x40\xf7\x9a\xdb\x5b\x63\x1f\xa7\x0f\x40\x92\xba\x8a\xdf\ +\x00\x07\x87\xc0\x68\x8e\xb2\xd7\x3d\xde\xdd\xd9\xda\xdb\xbf\x67\ +\x1a\xfd\xfd\x00\x4b\x21\x72\x97\x6e\x0e\xe8\xfe\x00\x00\x00\x06\ +\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\ +\x00\x09\x70\x48\x59\x73\x00\x00\x2e\x23\x00\x00\x2e\x23\x01\x78\ +\xa5\x3f\x76\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xe3\x09\x0d\x12\ +\x23\x0c\x0b\xc3\x35\x74\x00\x00\x18\xf2\x49\x44\x41\x54\x78\xda\ +\xed\x9d\x77\x7c\x14\x65\xfe\xc7\x3f\x33\xb3\x35\xbd\xf7\xde\x2b\ +\x21\x84\x1e\xa4\x84\x5e\x45\x29\x52\x2c\x27\x8a\xf2\xb3\xa0\xa8\ +\xdc\xa1\x67\xd7\xb3\x9c\x0d\xcf\xd3\x58\x4f\x25\x80\x06\x0f\x39\ +\x41\x90\x12\x4a\x88\x74\x12\x42\x02\x69\xa4\xb2\xe9\xa4\x6f\xda\ +\xee\xce\xce\xfc\xfe\x08\x21\x6c\xb6\x64\x37\x09\x09\x21\xcf\xfb\ +\xaf\x7d\xed\x3c\xf3\x3c\x33\x4f\xfb\xcc\xf3\x7c\x9f\xe7\xfb\x50\ +\xf1\xf1\xf1\x3c\x08\x04\x02\x81\x40\x30\x11\x01\x00\xcc\x58\xb0\ +\x8a\xe4\x04\x81\x40\x20\x10\x8c\x26\xe9\xf7\xed\xa0\x49\x36\x10\ +\x08\x04\x02\xa1\x37\x10\x01\x21\x10\x08\x04\x42\xaf\x10\x0c\x74\ +\x82\x8d\xad\x3c\x92\xb3\x55\x28\xae\xe7\xb1\x30\x52\x00\x5f\x67\ +\x86\x94\x02\x81\x40\x20\x10\x01\xd1\x8f\xbc\x8d\xc7\x9f\xb9\x2a\ +\x24\xe4\xa8\xa1\xb8\x6e\xb6\x4f\x3e\xa2\x44\x9c\x33\x8d\x85\x91\ +\x02\x78\x39\x12\x21\x21\x10\x08\x04\x22\x20\x37\xd1\xaa\xe0\x71\ +\x32\x8f\xc5\x96\x2c\x16\x2d\x9c\xf6\xf5\x23\x55\x1c\x8e\x54\x29\ +\x31\xc7\x8d\xc6\xbc\x08\x21\xdc\xed\xc9\xac\x1a\x81\x40\x20\x0c\ +\x6b\x01\x69\x53\xf2\x38\x73\x85\x45\x42\x16\x8b\x06\xb6\xe7\xf0\ +\xfb\xcb\x39\xec\x2f\x57\x60\x91\x27\x8d\xd9\xe1\x42\xb8\xd8\x12\ +\x21\x21\x10\x08\x84\x61\x25\x20\x0a\x15\x8f\x73\x05\x2c\xb6\x5e\ +\x62\x71\x4d\x65\xfa\xfd\xbb\x65\x1c\x76\xcb\x14\x58\xe2\xcd\x60\ +\x66\xb8\x00\x8e\xd6\x44\x48\x08\x04\x02\xe1\x8e\x16\x10\x25\xcb\ +\x23\xad\x50\x8d\x6d\x97\x58\x94\x2b\xfa\xbe\x37\x71\x67\x89\x1a\ +\x3b\x4b\xd4\x58\xe1\xc7\x60\x7a\x98\x00\x76\x96\x44\x48\x08\x04\ +\x02\xe1\x8e\x12\x10\x56\x0d\xa4\x17\xb3\xf8\x29\x93\x45\x71\x5b\ +\xff\x6f\x6a\xff\xb9\x50\x8d\x1d\x45\x6a\xac\x0e\x60\x30\x35\x54\ +\x08\x1b\x73\x8a\x94\x1a\x81\x40\x20\x0c\x65\x01\x51\x73\x40\x66\ 
+\x09\x8b\xc4\x4c\x16\x79\x2d\x86\x85\xc3\x5d\x4c\xc1\xc3\x1c\x68\ +\x51\x01\x97\xe4\xa6\x8b\x0c\xc7\x03\x09\x57\xd4\xd8\x91\xaf\xc6\ +\xea\x60\x06\x93\x43\x84\xb0\x94\xf6\xaf\x90\xa8\xd5\x6a\xb4\xb6\ +\xb6\x81\xe3\x78\x80\x02\xc4\x22\x11\x24\x12\xf1\x80\x14\x82\x8a\ +\x65\xd1\x2c\x6f\x01\xc7\x73\xa0\x40\x41\x24\x12\xc2\xc2\xc2\x9c\ +\xd4\xce\x41\x80\xe7\x81\xe6\x76\x1e\xea\xeb\x0b\x3e\x44\x02\xc0\ +\x4c\x4c\x3e\x5a\x08\x84\x7e\x11\x10\x8e\x07\x2e\x5f\x65\xf1\x4b\ +\x26\x8b\xcb\x06\xc4\x40\x48\x01\x8f\x45\x08\x10\xed\x23\x80\xad\ +\x45\x57\x03\x54\xb2\x3c\x0a\x2a\x39\xec\xb8\xa8\x42\x46\x93\x69\ +\x62\xa2\xe0\x81\xff\xe4\xa8\x91\x98\xa7\xc6\xfd\xa1\x02\xc4\x06\ +\x09\x60\x2e\xe9\x5b\xe3\x2e\x2b\xaf\xc4\x1f\x49\x27\xf1\x7d\x52\ +\x26\xe4\xac\xe6\x32\xb1\xe5\xa3\xbc\xb1\x78\x6e\x2c\xc2\xc3\x82\ +\x40\x51\x9a\xe9\x6c\xff\x65\x1f\x72\x0a\xca\x8c\x4a\x43\x2c\x14\ +\xe0\x99\xc7\x97\x6b\x89\x42\x75\x75\x0d\xfe\x48\x3a\x89\xaf\xf7\ +\x5f\xd0\x4a\x7b\x56\xa0\x13\x96\xce\x8f\xc5\x98\x98\x48\x30\x4c\ +\xd7\x12\xe7\xf6\x76\x05\xbe\xfe\xe1\x57\x28\x54\x9a\x2b\x13\x66\ +\x4c\x1e\x85\xe8\xa8\x70\x00\x40\x7a\x46\x16\x0e\x25\xa7\x6a\x5c\ +\x97\x88\x84\x58\xb7\x66\x09\x84\x42\x21\x9a\x9b\x5b\xf0\x6d\xc2\ +\x6f\x50\xb1\x6a\xbd\xcf\x6c\x26\x15\xc1\xdf\xdb\x0d\xd1\x51\xa1\ +\x70\x76\x72\xd0\x19\xe6\x8b\x6f\x77\xa0\xb2\xa6\xd1\xa8\x3c\x30\ +\x97\x8a\xf0\xdc\x13\xab\x20\x14\x0a\xc1\xf3\x40\xc2\x49\x05\x1a\ +\xda\xf4\x87\x17\x32\x80\xa7\x35\x85\x08\x0f\x06\x3e\x4e\x8c\xce\ +\x8e\x7e\xfb\x29\x25\x6a\x5b\x79\x83\x71\xb8\x5b\x51\x88\xbc\x1e\ +\x07\x65\xa0\xaa\x34\xb5\xf2\x48\xce\x51\x61\x77\x81\x1a\x75\xdd\ +\x16\x7d\x84\x59\x50\x98\x17\xcc\x60\xb4\xbf\x10\xc2\x6e\x8f\x52\ +\xd3\xc4\xe1\xe7\x73\x2a\x70\x06\xaa\xb2\x85\x08\xf0\xb3\xa7\x11\ +\xe5\xcd\xc0\xd6\x82\x4c\xc5\x12\x86\xa1\x80\xf0\x3c\x90\x53\xa6\ +\xc6\x7f\x33\x54\x48\x6f\xec\xb9\xe3\x7f\xe3\x2e\x11\x82\xdd\xb5\ +\x1b\xbe\x48\x40\x21\xd4\x83\xc1\xdf\x9c\x68\xbc\x73\x50\x61\x50\ +\x84\xf4\xd1\xc2\x01\x5f\x5d\x66\xf1\x73\x0e\x8b\xfb\xc3\x04\x98\ +\x10\x28\x80\xb4\x17\x5f\x89\x29\x27\xce\xe1\xf9\xf8\xbd\x68\xd7\ +\xd3\xfa\x77\xa4\x95\x60\x47\x5a\x09\x36\x2e\x1a\x8d\x55\xcb\xe6\ +\x83\x61\xba\x1a\xff\x99\x8b\x05\x38\x5a\x58\x63\x74\x5a\x8f\xb6\ +\xb4\x6a\x08\xc8\xc5\xcc\x6c\x3c\xf3\xe1\x2f\xa8\x53\xe9\xee\xc4\ +\x0f\x5e\xa9\xc6\xc1\xcd\xbb\xf0\xe0\x84\x4c\x3c\xb5\x76\xf9\x8d\ +\xd1\x50\x6b\x5b\x1b\xbe\x3d\x9e\xa3\x15\x3e\xc0\xc7\xf5\x86\x80\ +\x14\x5f\xad\x40\xc2\xa9\x7c\xad\x30\x7f\x59\xa5\x80\x50\x28\x84\ +\xbc\xb9\x05\xff\x49\xc9\x35\xe2\xa9\xb3\x00\x24\xe1\xcd\x55\x93\ +\xb1\x70\xee\x54\x0d\x21\xe3\x38\x0e\xbf\x9e\xc8\x45\xb5\x82\x35\ +\x3a\x0f\x9e\x7a\x54\x05\xa1\x50\x08\x96\x03\x92\x4a\x39\x9d\xcb\ +\xba\x35\xa8\x04\x90\xab\xc6\x02\x0f\x1a\x2b\xc6\x89\x20\x15\x51\ +\x1a\x1f\x32\x87\x4b\xd5\x68\x54\x1b\x11\x47\x9e\x1a\xb3\x5d\x69\ +\xac\x1e\x2f\xd2\xf9\xc1\x51\x52\xad\xc6\xbb\xc7\x95\x7a\x17\x7d\ +\x64\x35\xf3\xc8\x4a\x65\x31\xb6\x50\x8d\x27\xa7\x8a\x61\x71\x53\ +\x1c\x0d\x2d\x3c\x8e\x56\x73\x3d\xbf\x7c\x19\x07\x61\x26\x8b\x27\ +\xa3\x04\x98\x14\x22\x34\x28\x66\x04\xc2\x50\xa1\xc7\xcf\x21\x9e\ +\x07\xae\x94\xab\xf1\xc1\xc1\x76\xbc\x9c\xa2\x34\x4a\x3c\x84\x14\ +\x10\xe4\x66\x78\x63\xa0\x44\x44\x21\xd6\xb3\x6f\x9b\x07\x1b\xd5\ +\xc0\xe7\x99\x2c\xd6\xef\x6e\xc7\xf1\x2c\x15\x14\x2a\xe3\xc5\x28\ +\x3d\x23\x0b\x4f\x7e\xfe\xbb\x5e\xf1\xb8\x99\x0f\x76\x9f\xc7\x81\ +\xa4\x94\x7e\xcb\xf4\xa2\x62\x19\x1e\x78\xf7\x67\xbd\xe2\x71\x33\ +\x5b\x4e\xe5\x63\xcb\x4f\x7b\x06\xb5\x92\xbc\xba\xfd\x38\x76\xec\ +\x3a\xd0\x6f\xf1\x99\xda\x77\xfe\x5e\xca\xe1\xf3\x63\x0a\x18\x91\ 
+\x5d\x7a\x39\x50\xc1\xe1\xf3\x64\x05\x94\xac\x66\x79\xd7\x36\x71\ +\x78\xed\x98\xd2\xa8\x15\x83\x67\xeb\x79\x7c\x93\xa2\xbc\x31\xbd\ +\x65\x2a\x2a\x1e\xd8\x9c\xce\xe2\xc8\x65\x15\xe9\x79\x08\x77\xbe\ +\x80\x14\x55\xa9\xb1\x39\x49\x81\x4d\xc9\x4a\x9c\xa9\xe3\x4d\x6a\ +\x28\x05\x95\x3d\xb7\x76\x3f\x47\x1a\x4b\x7d\x18\xd8\xf5\xd1\x94\ +\x5f\xc7\x02\x9f\x5e\x64\xb1\x61\x77\x3b\x4e\xe6\xaa\xb4\x3a\x89\ +\xee\x28\x95\x2a\x7c\xfc\x9f\xbd\x26\xa5\xf1\x7a\xc2\x51\xd4\xd6\ +\xd6\xf7\x39\xc3\x79\x9e\xc7\x0f\x89\xfb\x4d\xba\xe7\xdf\x87\x32\ +\x71\x25\xbf\x78\x50\x2b\xca\xbb\xbf\x9e\x41\x6e\x5e\xe1\xa0\xa5\ +\x7f\xaa\x96\xc7\x9f\x39\x7d\xeb\x78\xcf\xd4\xf1\x38\x9e\xad\x39\ +\x62\xfa\x3d\x43\x05\xb9\x09\xc2\xf4\x67\x0d\x87\xcc\x12\xb6\x4f\ +\xcf\xf1\x45\x26\x8b\x8a\x3a\x8e\xf4\x3e\x84\x3b\x5b\x40\x9e\xff\ +\xe4\x67\xa4\x96\x5e\xeb\x55\xc4\xaf\x1f\x57\x22\x25\x5b\x05\xb9\ +\x81\x95\x59\x81\x6e\x0c\x56\x4e\x10\xe1\xab\x25\x52\x7c\x3a\x53\ +\x8c\x67\xa2\x04\x98\xec\xd8\xfb\x39\xe2\x46\x25\x87\xcf\x8e\xca\ +\x50\x58\x52\x67\x30\x5c\x61\xd1\x55\xa4\x57\x37\x9b\x14\x77\x3b\ +\xc7\x23\xe3\x72\x5e\x9f\x33\xbc\xaa\xba\x06\xbb\x2e\x96\x9a\x7c\ +\xdf\xd9\xb4\x4b\x83\x5e\x59\xfe\x38\x7c\x7a\x50\xd3\x4f\xcc\x66\ +\xc1\xaa\xfb\x16\xc7\x4f\x39\x2c\x3a\xcd\x47\xad\x0a\x1e\xbb\x65\ +\xa6\x77\xe4\x29\x85\xea\x3e\xbf\xcb\x89\x7c\x96\xf4\x3e\x84\x21\ +\x8f\xc1\x6f\xff\xfc\xab\x79\x90\x94\x5e\x81\x77\xc8\x78\x88\x7d\ +\xc6\xa2\x8d\x91\x1a\x1d\x71\x1b\xd7\x31\x5c\x47\x3a\x8b\x20\x73\ +\x0a\xae\x66\x14\x18\x1a\x58\x39\x56\x08\xbb\x6e\x86\x44\x9a\x06\ +\x3c\x1c\x68\x78\x38\xd0\x98\x1c\x06\xac\x55\xf0\x38\x9b\xcf\xe2\ +\xb3\x0c\xe3\x1a\x99\x90\x02\x04\x2d\xf5\xa8\xa9\x94\xa1\xa4\x4d\ +\x09\x8e\xb7\x32\x18\xbe\xac\xa2\x5a\xe7\xff\x6f\xaf\x9e\x82\x99\ +\x71\x13\x51\x5a\x56\x89\x25\xaf\x7e\xaf\x75\xbd\x44\x56\x69\x30\ +\x5e\x09\x4d\x61\x84\x8b\x25\xb8\x6e\x7d\x92\x99\x44\x00\xa9\x54\ +\x02\x00\xb8\x76\xad\x56\xe7\xbd\xff\x37\x3d\x02\xcb\xee\x9e\x8e\ +\xfa\xc6\x26\x3c\xfd\x4e\x02\xca\xdb\x34\xdf\xfd\xf2\x95\xd2\x5b\ +\x5e\x19\x9e\x98\x11\x81\xe8\xc8\x40\xd4\xd5\x37\xe1\xb5\x2d\x47\ +\xb4\xa6\xf7\xb6\x9f\xc8\xc3\xd3\x8f\xa9\x35\x6c\x21\xdd\x71\x12\ +\x0b\xe0\x65\x6b\xa6\xf5\xbf\x8d\x85\x18\x42\xa1\xe1\xa1\xe6\x58\ +\x5b\x0a\x73\x42\x04\x50\xb0\xc0\xd6\x0c\x16\x65\xdd\xf6\x13\x5d\ +\x53\x75\x18\xad\x0d\x79\x29\x18\x6b\x4b\x21\x2e\x40\x80\x76\x15\ +\x8f\xad\x97\x59\xd4\x74\x1b\xb4\x34\xb0\xc0\xb5\x26\x0e\x6e\x76\ +\x34\x6a\xe5\xba\xc5\x23\xce\x99\xc6\x8a\xd1\x42\xb4\x2a\x79\xbc\ +\x79\x54\xa9\x65\x54\x3f\x53\xc3\xe1\xe9\x1e\xf2\x72\x75\x00\x03\ +\x7f\x47\x1a\x15\x8d\x3c\xbe\xc9\xd2\xae\xc7\x67\x2a\x38\x2c\x25\ +\xfd\x0f\xe1\x4e\x16\x90\xce\x2f\xef\xdc\xac\x53\xb0\x2f\xba\x00\ +\xd7\x88\x38\xf0\x2e\xe1\x50\xc2\x34\xdb\x45\x5e\x0b\x7f\x63\xa9\ +\x6f\xf1\x61\x25\x5e\x8c\x13\xe9\xdd\x18\xd8\xa6\xe0\x71\x3c\x87\ +\xc5\x0f\xd9\xc6\x89\x87\x05\xdb\x8a\xfa\xca\xab\xb8\xd2\x68\xfc\ +\x88\x42\xa9\xd4\x3d\x15\x32\x22\x22\x08\x52\xa9\x04\xfe\x7e\x5e\ +\x08\xb3\x33\x43\x56\x5d\xab\xc6\x75\x85\xd2\xf0\x14\x4a\x8c\xbb\ +\x0d\xe2\xdf\x7f\xd6\x60\x18\x7d\x2b\x9f\x26\x8e\x8d\x80\x83\x83\ +\x1d\x1c\x1c\xec\x30\x6d\x84\x17\xb6\x9d\xd1\x9c\x2e\x6a\x6a\x51\ +\xdc\xf2\xca\x10\x12\xe0\x85\x71\x63\x46\x02\x00\x4a\xcb\xaf\xe1\ +\xb3\x83\x19\x5a\x75\x41\xa1\x50\xc2\xcc\x4c\xff\x87\xc4\x3d\xb1\ +\x41\x78\xf2\xd1\xfb\x7a\x95\xbe\x8b\x05\x85\x28\x9f\x8e\x2a\xc9\ +\x71\xc0\x07\xa9\xda\xf9\xdd\x93\x9d\xcb\xc3\x8a\xc6\x98\x80\x8e\ +\x38\x94\xea\x8e\xe9\x22\xad\x3a\xad\xec\x88\x43\x9f\x4d\x65\x8c\ +\x27\x03\x7b\x2b\x1a\xf6\x00\xa2\x1d\x68\x1c\xae\xd4\x14\x1a\x05\ 
+\xdf\xb1\x8c\x9d\x31\x30\x58\x0e\x70\xa4\x31\xc2\x47\x80\x28\x00\ +\x19\x95\x6a\xad\x29\xe0\x5b\xb1\x67\x8a\x40\xb8\xed\x04\xa4\x93\ +\xda\xb6\x76\xd4\x9e\xdb\x07\x4f\x87\x54\xd8\x85\xc5\xa1\xd5\xda\ +\xbb\x57\x09\x16\xb6\xf2\x78\xf7\x88\xb6\x88\xb0\x6a\x20\xad\x90\ +\xc5\x0f\x19\x2a\x54\x29\x7b\x8e\xc7\x9c\x57\x41\x51\x53\x86\xb3\ +\xd5\x35\x77\x48\x51\x50\x37\x8d\xc8\x06\x7f\xa9\xa7\x99\x99\x78\ +\x50\xd3\x17\xe9\xa9\x99\xa6\x74\xbb\x92\xde\xc6\x41\xe9\xfc\xd9\ +\xfb\xbc\x14\x50\x5a\xa9\x72\x44\x3f\x08\xc3\x49\x40\x3a\x91\xd5\ +\x54\x41\x76\xfc\x27\x04\xfa\x86\x42\x1a\x38\x05\x6d\x62\x9b\x3e\ +\x8b\x48\x6e\x99\x1a\xdb\x2f\xa8\x8c\xda\x64\x28\xa1\x38\xf0\x0d\ +\xd5\x28\xa9\xac\x40\x35\xab\x1e\xb0\x8c\x1a\xe9\x64\x81\x98\xa8\ +\x60\x52\x63\x08\x04\x02\xa1\xb7\x02\xd2\xc9\x95\xa2\x6c\x58\xca\ +\x72\xe1\x1a\x32\x11\x22\xaf\xd1\x50\x30\x92\x5e\x89\x88\x8f\x25\ +\x85\x23\x55\x3d\x1b\x32\x69\x0a\x90\xb4\x35\xa0\xa6\x42\x86\xe2\ +\xd6\x5b\x3b\x9d\x43\x51\x14\x3c\xec\x2d\x30\x6b\x5c\x10\xc2\x43\ +\x7c\xe1\xe3\xe5\x0e\x27\x27\x07\xad\xcd\x84\x04\x02\x81\x40\x04\ +\xa4\x97\xc8\x59\x0e\xf2\x4b\x7f\xc2\xa1\x28\x0d\x8e\xe1\xd3\x01\ +\xe7\x50\xb0\x26\x9c\x92\x5b\xd8\xca\xa3\xb0\xb5\xe7\x51\x87\x19\ +\xdb\x86\x96\x6a\x19\xce\xd7\x37\xdd\xd2\xcc\xe0\x79\xfe\x86\x80\ +\x7c\xf8\xc6\x93\xb7\x4d\x21\xd9\x09\x19\x4c\x1b\x17\x4a\x6a\x2b\ +\x81\x40\xb8\x73\x04\xa4\x93\x9a\x96\x56\xd4\x9c\xdd\x03\x4f\xa7\ +\x54\xd8\x86\x4d\x47\x9b\xa5\x7b\xbf\x3c\x9c\x14\x2c\x54\xb5\xe5\ +\xb8\x5c\x55\x8d\x81\xb0\x39\x56\x5f\xab\x85\xaf\x8f\xe7\xa0\x17\ +\xca\xb4\xd8\x91\x98\x3c\x3e\x12\x0e\xf6\xb6\xb0\xb3\xb3\x81\xb5\ +\xb5\x15\x68\x32\xfa\x21\x10\x08\x77\xa2\x80\x74\x22\xab\x2e\x87\ +\xac\x3a\x01\xfe\x7e\x91\x30\x0b\xb8\x0b\xed\x62\xab\x5e\xc5\x23\ +\xa2\x78\x50\x8d\xd7\x20\xab\x2c\x43\xb5\xaa\xff\xed\x1c\x02\x81\ +\xee\x55\x64\xf1\xdb\x93\xe0\xe1\xee\x02\x77\x37\x97\x41\x2d\x94\ +\xd1\xa3\x22\x87\x6c\x85\xa2\x40\x84\x8e\x40\x20\x02\xa2\x87\x8d\ +\x0b\x47\x23\x23\x57\x86\x03\x79\x55\x7a\xc3\x14\x14\x66\xc2\x52\ +\x96\x05\xb7\x90\xbb\xc0\x78\x45\x43\x45\x1b\xb7\xa2\x87\xa6\x00\ +\x49\x7b\x23\x6a\xca\x65\x28\x6e\x6d\x37\x18\x76\xbe\xab\x04\xb1\ +\xc1\xb6\x48\xc9\xae\xc3\x1f\x55\xa6\xd9\x44\x3c\xdd\x75\x0b\x44\ +\x5a\x95\x1c\xab\x5f\xfc\x1a\xef\x3f\xb5\xe8\xc6\x72\x56\x53\x48\ +\x2d\x6b\xc0\xa2\xc7\xfe\xa1\xf1\x1f\x43\x51\x78\x6b\xfd\x12\x44\ +\x84\x0f\x0f\x03\xfc\xb6\xe3\xd9\xd8\x7f\x56\x33\x0f\x84\x0c\x8d\ +\xcd\x7f\x7f\x08\x9e\x1e\x6e\xa4\xc5\x11\x08\xc3\x59\x40\xc2\x43\ +\xfd\xb0\x72\xd9\x3c\xdc\x73\xfe\x22\x36\x6f\x39\x88\x1c\x3d\x2e\ +\x55\xe5\x2a\x35\x72\x33\x8f\xc1\xa1\x28\x15\xf6\xe1\xd3\x01\xa7\ +\x60\x70\x06\xbe\x4e\xcd\xd5\xed\x68\xae\x96\xe1\x7c\x9d\x61\xef\ +\xae\xe1\x66\x0c\x9e\x9f\xeb\x8d\xb1\x23\x5c\xc1\x30\x34\xe6\x4c\ +\x52\xa1\x6a\xf3\x79\xa4\x35\x1b\xbf\xb3\x37\x28\xd0\x17\x0f\x4f\ +\x0a\xc2\xf7\x7f\x6a\xef\x2c\xaf\x53\xa9\xb1\xf6\x93\x5d\xd8\xb8\ +\x50\x86\xe5\xf7\xce\x82\x58\x6c\xfc\x72\xd6\x76\x8e\x47\x71\xb3\ +\xf6\x1a\xe4\xc6\xa6\xe6\x21\x57\x31\x1a\x9a\x5a\x74\x8f\x30\x7a\ +\x98\x4a\x93\xb3\x1c\xe4\x3a\xf2\xa0\xb9\xb9\x75\xc0\xdf\xa1\x45\ +\xcf\x72\xf0\x81\x9c\x0d\xe4\x79\xa0\xb6\x5d\x7b\xfe\x55\x48\x06\ +\x6a\x84\xe1\x28\x20\x40\xc7\x14\xd0\xc4\xf1\xa3\x10\x15\x19\x82\ +\x03\x87\x4f\xe2\xf5\x9f\xf5\x3b\x1a\xac\x69\x96\xa3\xe6\xcc\xff\ +\xe0\xe9\xe2\x09\xab\x90\x38\x28\x2d\x5d\x35\xae\x4b\xc1\x82\xad\ +\xab\xc0\xa5\xca\xaa\x1e\xed\x1c\x6f\x4d\x76\xc1\x9c\x49\xde\xb0\ +\x30\x13\x75\xdd\x2f\x11\x62\x82\xaf\x05\xd2\x32\x1b\x8c\x7e\x7e\ +\x9a\xa6\xf1\xc4\x23\xcb\x60\x67\xb3\x1f\x1f\xfd\x9e\xaa\x33\xcc\ 
+\x07\x7b\xce\x23\xab\xa0\x0c\x1b\x1e\x5f\x06\x27\x47\xfb\x61\x51\ +\x19\x4e\xa5\x66\xa3\x51\xde\x82\xfa\x06\x39\xbe\x3a\x92\xa5\x75\ +\xdd\x4d\x2a\x80\x58\x2c\xba\xad\xdf\x21\xa7\x96\xc3\xde\x0b\x4a\ +\x28\x58\xe0\x57\x3d\x2e\x47\xcc\x07\xe0\x7c\x8f\xe3\x05\x6a\x14\ +\xd6\x70\x28\x6b\xe2\x75\x1e\x5b\x10\x62\x49\x14\x84\x30\x4c\x05\ +\xe4\x46\x43\x34\x37\xc3\xbd\x8b\x66\x60\xe2\xb8\x28\xec\xd8\x95\ +\xa4\xd3\xcd\x78\x27\xb2\x4a\x19\x50\xf9\x23\xfc\x03\x46\x42\xe2\ +\x1f\x0b\x4e\x64\x01\x41\x73\x0d\x4a\x2b\xca\x50\xa5\x34\x3c\x7a\ +\x78\x24\xd8\x12\xab\x67\xfb\xc3\xd5\xc9\x52\xb7\x20\xf4\xa2\x2d\ +\x8a\xc5\x22\x3c\xb8\x72\x21\x02\x7c\xdd\xf1\xd7\xf8\xdf\xb5\xce\ +\xe3\x00\x80\xbd\x59\x15\xb8\xb0\xe9\x0b\xbc\xf7\xcc\x12\x8c\x1c\ +\x11\x76\xc7\x57\x86\x9f\xce\x15\x01\xe7\x8a\xf4\x5e\x5f\x32\x39\ +\xfc\xb6\xd8\xe4\x68\x88\xac\x66\x1e\x59\x39\xfa\xed\x66\xee\x62\ +\x0a\x0e\x56\xb7\xfe\x1d\x8e\x56\x73\x40\xb5\xfe\xeb\xe3\xdc\x19\ +\xd2\xfb\x10\x86\x3c\xfd\xd2\x92\x5c\x9c\x1d\xb1\x7e\xdd\x4a\x24\ +\xbe\xfa\x00\x66\x05\x3a\x19\x0c\x5b\x90\x9f\x0e\xd9\xd1\xaf\xd1\ +\x92\x9f\x8a\xb4\x92\x12\x83\xe2\x31\xc5\x4e\x84\x1d\x0f\x04\xe3\ +\xf9\xfb\x47\xea\x15\x8f\xbe\x40\x51\x14\x62\x27\xc4\x60\xdb\x5b\ +\x8f\x60\xac\x9b\x6e\x83\x7f\x79\x1b\x8b\x07\xdf\x4b\xc4\x6f\x7b\ +\x8f\xdc\x58\xe6\x3b\x5c\x99\x35\x6d\xec\x90\x7f\x87\x15\xe1\x8c\ +\x41\x17\x24\x03\x81\x94\x06\x26\x04\x0a\x40\x20\x0c\xeb\x11\x08\ +\xcf\xf3\x1a\x73\xe2\xa1\x21\x01\x78\xef\x15\x1f\x2c\x3e\x9b\x8e\ +\x8f\x7e\x3c\x88\x02\xb9\x6e\xe3\x76\x93\x52\x85\x3a\x79\x33\x20\ +\xb2\xd0\x79\xdd\x55\x40\xe1\xa5\x59\x1e\x98\x3c\xc6\x13\xa2\x6e\ +\x47\xc0\xf1\x3c\xa0\x54\xb1\x10\x8b\xfa\xaf\x01\xfa\x78\x7b\xe0\ +\x93\xd7\x1e\xc7\x57\x3f\xec\xc2\x16\x1d\x07\x31\x01\xc0\x2b\xdb\ +\x92\x11\x12\xe8\x83\xe0\x20\x3f\xbd\xf1\xd8\x09\x19\xcc\x89\xd6\ +\x76\xf1\xe2\xec\x34\xf4\xa7\xc0\x3e\x5e\x3b\x1b\xde\x5e\x1e\x3d\ +\x86\x0b\xb1\x91\x62\x54\x90\xe6\x34\x25\x4d\x53\xb0\xb3\xb5\x1e\ +\xf4\x77\x98\xef\x4e\x63\x5c\xa0\x70\xd0\x9f\xe3\xaf\xe3\x85\xb0\ +\x31\x27\x53\x58\x84\x61\x2e\x20\xf9\x05\x25\xb8\x94\x9d\x8f\x59\ +\x71\x13\x61\x6e\xde\xe1\x81\x55\x20\x10\x60\xd2\xc4\xd1\x18\x39\ +\x22\x14\xfb\x93\x4e\xe0\xcd\x1d\x27\x4c\x6b\x5c\xa3\xed\xb1\x78\ +\x9a\x2f\x6c\xad\xb5\x1d\xf6\x55\x54\xcb\xf1\xd3\xc1\x42\x9c\xb9\ +\xda\x82\xed\x9b\x26\x80\xa1\xfb\xaf\x11\x5a\x5a\x5a\x60\xc3\x13\ +\xab\x10\xe4\x97\x8c\x97\xb7\x25\xeb\x0c\x73\x29\x3b\xdf\xa0\x80\ +\x84\xba\x58\x61\xd3\xb3\x0f\xdd\x51\x15\xc4\xdf\x52\x8c\x97\x1e\ +\x5b\x80\x31\x31\x23\x8c\x0a\x3f\x65\x94\x6f\xaf\x9d\x29\xde\xb2\ +\x61\x36\x05\xac\x09\x11\x60\x66\xa4\x70\x50\x47\x1f\x7e\x66\x14\ +\xd6\x8d\x13\xc2\xdf\x85\x4c\x5f\x11\x88\x80\xc0\xdc\x5c\x8a\xef\ +\xf7\x9c\x41\xe2\x81\x54\xac\xbf\x7f\x26\xc6\x8d\x89\xba\xe1\xea\ +\xdb\xc2\xc2\x1c\x4b\x17\xcf\x82\x42\xa9\xc4\xfb\xff\x3b\x67\xdc\ +\xd0\x9e\x02\x1e\x5a\x14\x0a\xba\x9b\x30\x34\xb7\x2a\x71\xe0\x44\ +\x09\x5e\x4e\xee\x72\xa7\xce\x73\x7c\xef\x8c\x1f\x06\x60\x18\x06\ +\x8b\xe6\xc7\x41\x22\x11\xe1\x85\xef\x0e\x69\x5d\x2f\xad\xa8\x19\ +\x76\x15\x24\xd8\xc3\xd6\x68\xf1\xb8\x5d\x61\x00\x4c\x0e\x11\x40\ +\x30\x80\xfd\x76\x9c\x33\x0d\x1f\xdb\x8e\x5d\x31\x12\x21\x05\x37\ +\x5b\x1a\x01\x2e\xcc\x80\x3e\x03\x81\x70\x5b\x0b\x88\x9b\xab\x33\ +\xb6\x7e\xf4\x0c\xfe\x38\x98\x82\x75\xff\xfa\x0d\x73\x82\x4f\xe3\ +\xd1\x55\x73\x11\x14\xe8\xdb\xd5\x78\xe9\xde\xb7\x18\x56\xcd\xe1\ +\x6c\x46\x39\x3e\xdc\x77\x15\xd9\x6d\x03\xe7\x38\x71\x74\x74\x04\ +\x00\x6d\x01\xe1\x06\xc8\x85\xea\xae\x3d\x49\x68\x92\xb7\xc2\xd9\ +\xd1\x16\xf6\x76\xd6\xb0\xb1\xb1\x82\xab\xb3\xa3\xc6\x99\xea\x03\ 
+\xc5\xbe\xec\x4a\x3c\x5f\x53\x07\x47\x07\xbb\x21\x5b\xc9\x55\x3c\ +\x50\x58\xa5\x46\xa4\xf7\xc0\xd9\x1d\xc6\x7b\x33\x88\xf1\x27\x76\ +\x0e\x02\x11\x10\x0d\x2a\xab\x6a\x34\x6c\x1f\x56\x96\x16\xb8\x6f\ +\xc9\x5c\x4c\x9a\x10\x8d\x6d\xff\x3d\x88\xa5\xaf\xfd\x80\xc7\xe3\ +\xc2\xb0\xfc\xee\x19\x70\xec\xe3\xf2\xd7\xbc\xa2\x3a\x3c\xb2\xb3\ +\xe8\x96\xbd\xbc\x5c\xde\x8c\xe2\xab\x65\xc8\xbd\x52\x82\xe3\xe7\ +\x72\xf0\xf6\xa6\x35\xb0\xb2\xb4\xb8\x71\xf8\xd3\x60\x91\x5f\x5c\ +\x81\x84\x6e\xb6\x98\xc5\x23\x3c\xf0\xe6\xa6\xb5\xb7\x34\xdd\xe5\ +\xa3\xbc\xb1\x23\xad\x44\xeb\xff\xdc\xbc\xc2\x21\x23\x20\x63\x6d\ +\x29\x5c\x6c\xe0\xd1\xed\x2c\x2a\x5c\x2c\x1d\x58\x01\x21\x10\x88\ +\x80\xe8\x60\xd3\x8f\x47\x90\x91\x5d\x8c\xfb\x97\xcd\x82\x87\x7b\ +\x97\xb1\xd4\xdd\xcd\x05\x1b\x9f\x7e\x00\x73\xe3\x72\xf1\xe5\xb6\ +\xfd\x98\xbf\xe1\x33\xbc\xbc\x72\x32\xea\x1b\x7b\xbf\x89\xee\x56\ +\x6d\xf8\xba\x2a\x2b\x43\xfc\x8f\x7b\xb0\x37\xab\x42\x73\xc4\xc3\ +\xde\xbe\xc7\x8c\xd6\x35\xdd\xfa\x8d\x78\xa3\x22\xfd\x91\x57\x5a\ +\xab\x75\xdc\xef\xa9\xd4\x6c\x4c\x9a\x38\x7a\x48\x54\x68\x07\x33\ +\x0a\x53\x24\x14\x0e\x56\x68\x2e\xcb\x4e\x2a\xe3\xb0\x9c\xe5\x21\ +\x12\x10\xe3\x35\x81\xd0\x5f\x98\x6c\x52\xfc\xdb\xe2\x31\x68\x6a\ +\x69\xc3\xbc\x8d\x5f\x62\xfb\x2f\xfb\xd0\xd4\x24\xbf\xa9\xc3\xa7\ +\x10\x19\x11\x82\x4f\xde\x78\x02\xef\xaf\x9d\x83\x2f\x7e\x3d\x81\ +\xf8\xc3\xbd\x3f\xcb\x9b\xea\xf5\xc5\x1e\xbe\xf0\x0b\x65\x5a\xe2\ +\x41\xb8\x6e\x03\x9a\x1a\xa5\xf5\x7f\xc2\xa9\x7c\xc8\x9b\x5b\x86\ +\xc4\x3b\xf0\x3c\x10\xed\xa1\x3d\x6d\x2a\x57\x03\xb2\x1a\x8e\x14\ +\x32\x81\x30\x98\x02\x12\x1a\xe4\x8b\xb7\x36\xad\xc5\xd7\xcf\xde\ +\x83\xdd\xc9\x99\xb8\xff\x85\x7f\xe1\x58\xca\x59\xa8\x54\x5d\x5f\ +\xef\x22\x91\x08\xd3\xa6\x8c\x47\xe2\xc7\xcf\x62\xf9\x28\xef\x5e\ +\x3f\x9c\x3e\xb7\x19\x8f\x06\x5b\x41\xc0\xd0\xa4\xf4\x6e\x01\x11\ +\xa1\xfe\x3a\xff\xbf\x92\x5f\x3c\x64\xde\x21\xd0\x45\x77\xdd\xc8\ +\x2a\x57\x93\x02\x26\x10\x06\x53\x40\x3a\xbf\x54\xc7\x8f\x1d\x89\ +\xef\xfe\xb9\x1e\x6b\x16\x8d\xc7\xfa\xf8\xbd\x78\xe1\x8d\x78\x5c\ +\xce\xba\xa2\x11\xce\xda\xda\x12\x01\x3e\xae\xbd\x7f\x3a\x1d\xfa\ +\xf1\x5a\xac\x13\xd6\x2d\xd5\xdc\x15\x5e\x5e\xaf\x24\x25\xd9\x4f\ +\xf8\x78\x7b\xc0\x52\xa0\x5d\x2d\x52\xd3\x73\x86\xcc\x3b\xd8\x5a\ +\xd0\x88\xb2\xd6\xae\x3c\x47\x4a\x38\x72\x94\x2c\x81\x30\xd8\x02\ +\xd2\x89\xb9\x99\x14\x8b\x17\x4c\xc7\xa1\xcd\x4f\x23\xc0\xd3\x11\ +\x2b\xdf\xde\x8a\x3f\x4f\xa5\xf6\xdb\xc3\xdd\xdc\x05\x3c\x16\x6a\ +\x8d\x23\xeb\x47\x62\xc5\xdc\x60\x98\x4b\xbb\x36\x83\x95\x57\xc9\ +\xb1\xaf\xb4\x95\x94\x64\x3f\x21\x91\x88\xb1\x7a\xb2\xf6\xe1\x55\ +\x89\xc9\x59\x50\x28\x87\x8e\x50\x4f\xd0\x31\x8d\x75\xb5\x9d\x47\ +\x65\x3d\x99\xc6\x22\x10\xfa\x0b\x93\x8d\xe8\x7b\x0e\x9e\x86\xbd\ +\x9d\x0d\xbc\xbd\xba\x0e\x8d\x72\x76\x72\xc0\xd3\x8f\xaf\xc0\xcc\ +\xa9\x05\x68\x69\x6d\xeb\x3f\x01\xa1\x28\x2c\x74\x95\x60\xcd\x1c\ +\x5f\x84\xf8\x3b\x68\x5d\xaf\xaa\x69\xc6\x3f\x13\xb3\xfb\xef\xb0\ +\xa9\xeb\xf1\xf4\xd6\x65\x09\xcf\xf3\x50\xa9\x54\xe8\x7e\x3b\x4d\ +\x53\x10\x08\x04\x3d\xde\xdb\x89\x5a\xcd\xf5\x2a\x6d\x5d\xbf\x7b\ +\xc3\x98\xe8\x10\x7c\x79\xe4\xb2\xc6\x7f\xd5\x0a\x16\x45\x45\x32\ +\x84\x04\xfb\x1b\xbc\x57\xcd\x72\x50\x2a\x55\x3a\x46\xad\xf4\x8d\ +\x3d\x42\xc6\xd0\xa2\xe0\x8d\x1d\x94\xea\x24\xc4\x8d\x06\x2e\x6b\ +\xff\x9f\x57\xa1\x86\x9b\x1d\xdd\x43\x5e\x76\xfd\x26\x23\x16\x02\ +\xa1\x1f\x05\x64\x67\xfa\x55\xec\x4c\xff\x1a\x2f\x2c\x8c\xc1\xa2\ +\x79\x53\x61\x63\xdd\xe5\x43\xaa\xa7\xce\xc5\x54\xdc\x9d\x2d\xf1\ +\xee\xba\x18\x30\xdd\xec\x1d\x6d\xed\x2a\x1c\x3e\x7d\x15\xaf\x1e\ +\x2e\xef\x95\x78\x88\x45\xba\xdd\x59\xfc\x79\xfa\x02\xc6\xc6\x44\ 
+\x20\xf3\xf2\x15\xdd\x23\x2e\xa9\x61\xd7\xee\x27\x4b\x1b\x10\xf3\ +\xd0\xdb\x3a\xaf\x25\x7f\xfe\x1c\x6c\x6d\xad\xf5\xa6\x7d\xfc\x64\ +\x3a\x9c\x1c\xec\x50\xdf\xd0\x88\xfd\x17\xb4\x97\xd2\xda\x5b\x77\ +\xec\xf4\x17\x09\x75\xdf\x7f\xe8\xc4\x65\x8c\x8c\x0c\x06\x45\x51\ +\x48\x3a\xa9\x7b\xe1\x82\x50\x68\x9c\x1b\x0f\x7f\x5f\x2f\x9d\xff\ +\x5f\xbc\x94\xd7\x63\x19\x7f\x7b\x3c\x07\xdf\x1e\xd7\xce\x03\x4b\ +\x01\x8d\xc3\x5f\x6f\x82\x44\xa2\x3f\x0f\xf3\xea\x79\x1c\xb8\xa8\ +\x82\x82\xe5\xf1\xdf\x7c\xdd\xf6\x0a\x33\x23\x3d\xe9\xba\xd9\x31\ +\x70\x14\x02\xd7\xba\x69\x59\x4a\x89\x1a\x53\xc3\x3b\xf2\x41\xc4\ +\xe8\x8e\x2b\xa5\x88\x85\xbb\x2d\x8d\x16\x05\x8f\x73\x3a\x0c\xef\ +\x52\x0a\x20\x26\x38\x02\xa1\x07\x01\x99\x13\xec\x8c\xfd\xb9\xba\ +\x0f\x8e\xfa\x70\x4f\x2a\xb6\x1c\xbc\x88\x17\x1f\x9c\x81\x49\xb1\ +\x31\x10\x8b\xfa\xdf\xcd\xb7\x99\x54\xb3\xc3\xe3\x38\x1e\xa9\x97\ +\x2b\xb1\x79\x6f\xb1\xde\xf3\x3f\x5c\x05\x14\x2c\xcc\x0c\x77\x94\ +\xae\x2e\x8e\x3a\xff\x7f\x79\xeb\x31\x60\xeb\x31\x03\x1d\x6b\xef\ +\x8f\xea\x55\x5e\x9f\xfe\x71\x71\x76\x34\xd0\xf1\xea\xb7\x33\x8c\ +\x08\xe9\x58\x8c\x60\x61\x61\x8e\x89\x1e\x36\x38\x59\xaa\xe9\xbe\ +\xfe\x84\xac\x1e\x27\xfe\xfe\x9d\xde\xfb\x67\x07\x39\x1b\xec\xbc\ +\x6f\xc6\xce\xce\x06\xf3\xc3\x5c\xb5\x56\xaa\xfd\x76\x2c\x03\x4b\ +\x17\xcf\xd6\x12\x74\x63\x90\xb3\x1c\xd4\x6a\xc3\x46\xec\x9c\x66\ +\x1e\x39\x59\xfa\x97\x52\xfb\x48\x29\xd8\x5b\x1a\x97\x36\x43\x03\ +\x71\x1e\x0c\x12\x8b\x34\xd3\x4c\x6f\xe4\x51\xdf\xcc\xc1\xd6\x82\ +\x86\xbd\x15\x05\x21\xd5\xb1\xd1\xf0\x66\x4e\xd5\xf2\x38\x75\x48\ +\xff\x21\x65\x93\x9c\x89\x7a\x10\x08\x40\x0f\x36\x90\x77\xfe\xfe\ +\x38\xbe\x5c\x7f\x37\x02\xad\x75\x6f\xac\xab\x56\xb0\xd8\xf0\xcd\ +\x7e\x3c\xfb\xca\x17\xc8\xb8\x94\x73\x4b\xbd\xd5\x16\x95\x36\xe0\ +\xcd\xef\xd2\xf0\x60\x62\xbe\x5e\xf1\xd8\x38\xda\x1e\x3b\x37\xc4\ +\xc0\xcb\xcd\xb0\xe3\x3e\x6f\x2f\x77\x4c\xd7\x31\x25\x66\x08\x09\ +\x4d\x61\xe4\x88\x90\x3e\xbf\x87\xbd\xbd\x2d\xd6\xdc\x65\xfa\xe9\ +\x84\x63\x47\x45\xdc\xf8\x7d\xdf\x82\x89\x26\xdf\xbf\x68\x96\x69\ +\x9e\x74\xe3\x26\x6a\xbb\x2f\xb9\x54\xdb\x8a\xd2\xb2\xf2\x41\xab\ +\xac\xcb\xc2\x04\x30\xc5\x9b\x7c\x84\xbb\xee\xc0\xf9\x95\x1d\xa3\ +\x0a\xa9\x88\xc2\x3d\x3e\xa6\x7b\x4a\x88\xf5\x23\xfe\x48\x08\x84\ +\x1e\x05\xa4\xf3\xe0\xa8\x1f\x3f\x7c\x06\xaf\xaf\x98\xa4\x37\xdc\ +\x09\x59\x3d\xee\x7f\xe7\x27\xfc\xf3\x5f\x5b\x50\x56\x5e\xd9\xaf\ +\x0f\x58\xdf\xd8\x86\x1f\x7e\xcb\xc2\xbc\x2f\x33\x91\x58\xa2\xdb\ +\x58\x7e\x9f\xb7\x19\xf6\xad\x8b\xc4\x9a\xc5\x61\x3a\x9d\x30\x6a\ +\x7d\x9d\x32\x0c\x9e\x7a\xf8\x6e\x48\x4c\xf0\xa5\xf5\xe6\x43\xd3\ +\x61\x6f\x67\xdb\x2f\xef\xb4\x6a\xe9\x6c\xf8\x58\x18\x3f\x62\x7b\ +\x75\xf9\x44\x78\x7a\x76\x1d\x07\x1b\x3b\x3e\x1a\x0b\xc2\x8d\x5f\ +\xdd\xb6\x30\xc2\x0d\xe3\xc7\x44\x99\xf4\x8c\x21\x81\x3e\x3a\xff\ +\xbf\x94\x55\x30\x28\x15\x75\xae\x1b\x8d\x31\x01\xa6\xcd\xb8\xfa\ +\x3a\x31\x3a\x4f\xfe\x3b\x2f\xeb\x1a\x95\xcc\x1d\x21\x84\x9b\x09\ +\x07\x4c\xcd\x70\xa1\x11\xea\x49\x76\xb4\x13\x08\x3d\x0a\x48\x27\ +\x16\xe6\x66\xb8\x77\xd1\x4c\x1c\xf8\xf8\x49\x3c\x3c\x29\x48\x6f\ +\xb8\x6d\x67\x0a\x31\xf7\x85\x78\x24\xee\xfc\x03\x72\x79\xdf\x8e\ +\x71\x55\x28\xd5\x48\x3a\x59\x8c\x25\x9f\xa4\xe2\xfd\x73\xb5\x3a\ +\xc3\x8c\xb6\x14\x20\x61\x45\x00\x5e\x7d\x64\x14\x7c\x3d\x6c\x4c\ +\x8a\xdf\xdf\xcf\x0b\x5f\x6d\x5c\xa6\x73\xc9\xaa\x96\x78\xac\x9c\ +\x8c\x59\xd3\x63\xfb\x2d\xd3\x9d\x1c\xed\xf1\xe9\x4b\x0f\x22\xcc\ +\xce\xac\xc7\xb0\x2f\xde\x3b\x0e\xf7\x2c\x9c\xa1\xf1\x9f\x48\x24\ +\xc2\x5f\x9f\x5a\x85\x85\x11\x3d\x9f\x31\xbe\x34\xda\x0b\x7f\x7b\ +\x7a\xb5\xd1\xf6\x8f\x4e\xdc\xdc\x5c\x74\x3e\x5f\xd2\x89\x8c\x3e\ 
+\x8f\x34\x4d\xbd\x7b\x4d\x08\x83\xbf\x4c\x12\x9b\x6c\x77\x90\x8a\ +\x29\xdc\xe5\xa4\x7d\x53\x72\x15\x87\xd6\xeb\x46\x7a\x2b\x33\x0a\ +\x2f\x4d\x13\xc1\x4b\xd2\xb3\x88\xcc\x72\xa5\xf1\x70\xac\xa8\xbf\ +\x7d\x78\x12\x08\x43\x16\x93\x3e\xa5\x5c\x5d\x9c\xb0\xe1\x89\xd5\ +\x98\x3d\xed\x0a\xbe\xda\xb6\x1f\x47\x0b\x75\x7b\xa7\xfd\xc7\xce\ +\xd3\xd8\x76\x20\x0d\xcf\x3f\x38\x13\x6d\xed\x0a\xd3\x3a\x17\x9e\ +\x47\x46\xee\x35\x7c\xb1\xb7\x08\xc9\x75\xba\x97\x8d\xda\xd0\x14\ +\x5e\x99\xee\x86\x69\xe3\xbc\x20\x95\xf4\xfe\x6b\x30\x3a\x2a\x1c\ +\xff\xfb\xd8\x05\xc7\x52\xce\x63\xd7\x91\x74\x5c\xaa\xed\x1a\xe1\ +\xd8\x09\x19\xac\x9a\x1a\x86\x19\x53\xc7\xc2\x4f\x87\x51\xf9\x9e\ +\xd9\xe3\x30\xbe\xca\x38\xef\xbc\x02\x86\x81\x95\xb5\xe6\x81\x58\ +\xbe\x3e\x9e\xf8\xe6\xbd\xa7\x70\xea\xcc\x05\x1c\x4a\xc9\xc0\x81\ +\xbc\x2e\x5b\x93\xa7\x99\x10\x8b\x27\x85\x62\xea\x5d\x31\x08\xf4\ +\xd7\x3d\x12\xb0\xb1\xb6\xc2\xeb\x1b\x1f\xc1\x82\xb4\x4c\x1c\x38\ +\x96\x8a\x5f\x2f\xca\xb4\x84\x63\xf6\xd4\x18\xc4\x44\x47\x42\xd0\ +\xcd\x05\xac\xb5\x95\x25\x36\x2e\x1c\x0d\xb6\x9b\x4d\xc2\xdf\xd7\ +\xe3\xa6\x51\x1a\x8d\x97\xd6\xdd\xad\xb5\xff\x83\x61\x68\x70\x1c\ +\x07\x86\x61\xf0\xdc\xca\x29\x46\x9f\xf7\x2e\x16\x89\x6e\xd8\x60\ +\x04\x34\xb0\x2a\x44\x80\x66\x85\x7e\x29\x61\x28\xc0\xd9\x8a\x42\ +\xb0\x1b\xa3\xd3\xee\x41\x53\xc0\xbd\x01\x0c\x1a\xba\xad\xa2\x08\ +\x75\xd5\x7c\xd7\x45\x23\x04\xb0\xc9\x53\x6b\x88\x1e\x45\x51\x50\ +\xb1\x00\xae\x9b\x84\x5c\x6d\x69\xbc\x33\x5f\x8c\x33\x57\x58\x1c\ +\x29\x52\xe3\xb2\x5c\x33\xce\x29\x8e\x34\xa6\x06\x32\x88\xf4\x12\ +\x68\xb9\xd7\x71\xb4\xa2\xb1\xca\x9f\xd1\x12\x45\x77\x3b\x62\x27\ +\x21\xdc\xf9\x50\xf1\xf1\xf1\xfc\x8c\x05\xab\x4c\xbe\x51\xa5\x62\ +\x71\xf2\x4c\x1a\x3e\xda\x72\x08\xc5\xcd\xa6\xef\x0f\x60\x3d\xc6\ +\x83\xef\x76\xa0\x94\x94\x02\x96\xf9\x5b\x60\x4b\xbe\xfe\x4e\xe9\ +\xd9\x91\xb6\x58\x32\xdd\x0f\x0e\xb6\x66\xfd\x9e\x19\x0a\xa5\xf2\ +\xc6\x1a\x4e\xa1\x48\x04\x9a\x1a\xb8\x4f\x4d\x8e\xe3\xc0\x71\x3c\ +\x28\x0a\x26\x2d\x77\xed\x44\xad\xee\x32\x52\x33\x0c\xd3\x2b\x43\ +\x37\xe1\x7a\xdd\x54\x77\x2d\xdf\x15\x30\x20\x23\x0e\x02\x41\x07\ +\x49\xbf\x6f\xef\xbd\x3b\x77\xa1\x50\x80\x29\x93\xc6\x22\x3a\x2a\ +\x0c\x7f\x1c\x4c\xc1\x3f\x76\x9e\xee\xf3\x03\xb5\xf1\xd0\x2b\x1e\ +\xf7\x78\x48\xb1\x66\xae\x1f\x02\xbc\x6f\x9d\x57\xd8\x5b\xb1\x92\ +\xcc\x58\x68\x9a\x46\x5f\x8e\x1b\xef\xd8\x67\x41\x44\xa3\x5f\x86\ +\xe5\xc4\x46\x4e\x20\x18\xd7\x56\xfa\x1a\x41\x77\x77\xee\x5b\x4f\ +\xf7\xaf\x91\x75\x84\x19\x83\x0d\xf3\xbc\x31\x26\xd2\x95\x74\x90\ +\x04\x02\x81\x70\x27\x09\x48\x27\x9d\xee\xdc\xe7\xc4\xe5\x22\x3e\ +\x61\x3f\x4e\xc8\xea\xfb\x14\x9f\x94\x02\x5e\x9b\xe6\x8a\x19\x13\ +\xbc\x35\x5c\x97\x10\x08\x04\x02\xe1\x0e\x13\x10\xa0\xc3\x38\x39\ +\x22\x22\x04\x9f\xbe\xe5\x87\x94\x93\xa9\xf8\x20\x21\x09\xe5\x6d\ +\xa6\x9f\xb1\xf1\x44\x84\x0d\xee\x9b\xe1\x07\x27\x07\x73\x52\x42\ +\x04\x02\x81\x30\x1c\x04\xa4\x13\x91\x48\x84\xe9\x53\x27\x20\x26\ +\x3a\x1c\x7b\xf6\x25\xe3\x83\x3d\xe7\x8d\xba\x6f\x9e\x8b\x04\x6b\ +\xe7\xea\xf6\x7b\x45\x20\x10\x08\x84\x61\x20\x20\x9d\xd8\x58\x5b\ +\xe1\x81\x95\x0b\x31\x65\xd2\x28\x6c\x49\x3c\xa0\xf3\xb8\x54\x00\ +\x08\x95\x32\x78\x6e\x8e\x17\xc6\x47\xb9\x41\x20\x20\x76\x0e\x02\ +\x81\x40\x18\xf6\x02\xd2\x89\x97\xa7\x3b\x5e\x7a\xee\x2f\x98\x9f\ +\x99\x8d\xcf\x13\xf6\xe3\x6c\x79\xd3\x8d\x6b\x6f\x4d\x76\xc1\x9c\ +\x58\x6f\x58\x98\x8b\x48\x69\x10\x08\x04\x02\x11\x10\x6d\x68\x9a\ +\x46\x74\x54\x38\xfe\x1d\x1c\x80\x63\x29\x67\x91\x53\x29\xc4\x7d\ +\xb3\xc3\xe1\xea\x64\x49\x4a\x81\x40\x20\x10\x88\x80\xf4\x8c\x44\ +\x22\xc6\x9c\x99\x77\x61\x0e\xc9\x7b\x02\x81\x40\x18\xd2\x10\x83\ 
+\x03\x81\x40\x20\x10\x88\x80\x10\x08\x04\x02\x61\xe0\x10\x00\x1d\ +\x3e\x4d\x08\x04\x82\xf1\x88\x25\x66\x70\xf1\xf0\xbd\x2d\x9e\x45\ +\xa9\x68\x47\x85\xac\x80\x14\x0a\x61\xc0\xf9\x7f\xad\xdc\xa8\xbc\ +\x93\x7c\x94\x6b\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ +\ +" + +qt_resource_name = b"\ +\x00\x06\ +\x07\x03\x7d\xc3\ +\x00\x69\ +\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\ +\x00\x06\ +\x07\xaa\x8a\xf3\ +\x00\x73\ +\x00\x74\x00\x61\x00\x74\x00\x69\x00\x63\ +\x00\x13\ +\x0b\xa6\xbd\x87\ +\x00\x73\ +\x00\x65\x00\x63\x00\x75\x00\x72\x00\x65\x00\x64\x00\x72\x00\x6f\x00\x70\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\ +\x00\x6e\x00\x67\ +\x00\x0a\ +\x04\xc8\x47\xe7\ +\x00\x62\ +\x00\x61\x00\x6e\x00\x6e\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\ +" + +qt_resource_struct_v1 = b"\ +\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ +\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ +\x00\x00\x00\x12\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\ +\x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x14\x45\ +\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ +" + +qt_resource_struct_v2 = b"\ +\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ +\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ +\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x12\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\ +\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x14\x45\ +\x00\x00\x01\x6d\x2b\xe8\x61\xe5\ +\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ +\x00\x00\x01\x6d\x2b\x7b\x43\xf7\ +" + +qt_version = QtCore.qVersion().split('.') +if qt_version < ['5', '8', '0']: + rcc_version = 1 + qt_resource_struct = qt_resource_struct_v1 +else: + rcc_version = 2 + qt_resource_struct = qt_resource_struct_v2 + +def qInitResources(): + QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) + +def qCleanupResources(): + QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) + +qInitResources() diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py new file mode 100644 --- /dev/null +++ b/journalist_gui/journalist_gui/strings.py @@ -0,0 +1,48 @@ +window_title = 'SecureDrop Workstation Updater' +update_in_progress = ("SecureDrop workstation updates are available! " + "It is recommended to install them now. \n\n" + "If you don\'t want to install them now, " + "you can install them the next time you reboot.\n\n" + "You will need to have set a Tails Administration " + "password in " + "the Tails Greeter on boot to complete the update.\n\n" + "When you start your workstation, this window will " + "automatically appear if you have not " + "completed any required updates.\n") +fetching_update = ('Fetching and verifying latest update...' + ' (5 mins remaining)') +updating_tails_env = ('Configuring local Tails environment...' + ' (1 min remaining)') +finished = 'Update successfully completed!' +finished_dialog_message = 'Updates completed successfully. ' +finished_dialog_title = 'SecureDrop Workstation is up to date!' +missing_sudo_password = 'Missing Tails Administrator password' +update_failed_dialog_title = 'Error Updating SecureDrop Workstation' +update_failed_generic_reason = ("Update failed. " + "Please contact your SecureDrop " + "administrator.") +update_failed_sig_failure = ("WARNING: Signature verification failed. 
" + "Contact your SecureDrop administrator " + "or [email protected] immediately.") +tailsconfig_failed_sudo_password = ('Administrator password incorrect. ' + 'Exiting upgrade - ' + 'click Update Now to try again.') +tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. " + "Contact your administrator. " + "If you are an administrator, contact " + "[email protected].") +install_update_button = 'Update Now' +install_later_button = 'Update Later' +sudo_password_text = ("Enter the Tails Administrator password you " + "entered in the Tails Greeter.\n If you did not " + "set an Administrator password, click Cancel " + "and reboot. ") +main_tab = 'Updates Available' +output_tab = 'Detailed Update Progress' +initial_text_box = ("When the update begins, this area will populate with " + "output.\n") +doing_setup = "Checking dependencies are up to date... (2 mins remaining)" +no_password_set_message = ("The Tails Administration Password was not set.\n\n" + "Please reboot and set a password before updating " + "SecureDrop.") +app_is_already_running = " is already running." diff --git a/journalist_gui/journalist_gui/updaterUI.py b/journalist_gui/journalist_gui/updaterUI.py new file mode 100644 --- /dev/null +++ b/journalist_gui/journalist_gui/updaterUI.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- + +# Form implementation generated from reading ui file 'journalist_gui/mainwindow.ui' +# +# Created by: PyQt5 UI code generator 5.10 +# +# WARNING! All changes made in this file will be lost! + +from PyQt5 import QtCore, QtGui, QtWidgets + + +class Ui_MainWindow(object): + def setupUi(self, MainWindow): + MainWindow.setObjectName("MainWindow") + MainWindow.resize(400, 500) + sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) + sizePolicy.setHorizontalStretch(0) + sizePolicy.setVerticalStretch(0) + sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth()) + MainWindow.setSizePolicy(sizePolicy) + MainWindow.setMinimumSize(QtCore.QSize(400, 500)) + MainWindow.setMaximumSize(QtCore.QSize(400, 500)) + self.centralwidget = QtWidgets.QWidget(MainWindow) + self.centralwidget.setObjectName("centralwidget") + self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.centralwidget) + self.verticalLayout_3.setObjectName("verticalLayout_3") + self.label_2 = QtWidgets.QLabel(self.centralwidget) + sizePolicy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed + ) + sizePolicy.setHorizontalStretch(0) + sizePolicy.setVerticalStretch(0) + sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth()) + self.label_2.setSizePolicy(sizePolicy) + self.label_2.setText("") + self.label_2.setPixmap(QtGui.QPixmap("static/banner.png")) + self.label_2.setScaledContents(True) + self.label_2.setObjectName("label_2") + self.verticalLayout_3.addWidget(self.label_2) + self.tabWidget = QtWidgets.QTabWidget(self.centralwidget) + self.tabWidget.setObjectName("tabWidget") + self.tab = QtWidgets.QWidget() + self.tab.setObjectName("tab") + self.verticalLayout = QtWidgets.QVBoxLayout(self.tab) + self.verticalLayout.setObjectName("verticalLayout") + self.label = QtWidgets.QLabel(self.tab) + sizePolicy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred + ) + sizePolicy.setHorizontalStretch(0) + sizePolicy.setVerticalStretch(0) + sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth()) + self.label.setSizePolicy(sizePolicy) + 
self.label.setTextFormat(QtCore.Qt.PlainText) + self.label.setScaledContents(False) + self.label.setWordWrap(True) + self.label.setObjectName("label") + self.verticalLayout.addWidget(self.label) + self.tabWidget.addTab(self.tab, "") + self.tab_2 = QtWidgets.QWidget() + self.tab_2.setObjectName("tab_2") + self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.tab_2) + self.verticalLayout_2.setObjectName("verticalLayout_2") + self.plainTextEdit = QtWidgets.QPlainTextEdit(self.tab_2) + self.plainTextEdit.setReadOnly(True) + self.plainTextEdit.setObjectName("plainTextEdit") + self.verticalLayout_2.addWidget(self.plainTextEdit) + self.tabWidget.addTab(self.tab_2, "") + self.verticalLayout_3.addWidget(self.tabWidget) + self.progressBar = QtWidgets.QProgressBar(self.centralwidget) + self.progressBar.setProperty("value", 24) + self.progressBar.setObjectName("progressBar") + self.verticalLayout_3.addWidget(self.progressBar) + self.horizontalLayout_2 = QtWidgets.QHBoxLayout() + self.horizontalLayout_2.setObjectName("horizontalLayout_2") + self.pushButton = QtWidgets.QPushButton(self.centralwidget) + self.pushButton.setObjectName("pushButton") + self.horizontalLayout_2.addWidget(self.pushButton) + self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget) + self.pushButton_2.setObjectName("pushButton_2") + self.horizontalLayout_2.addWidget(self.pushButton_2) + self.verticalLayout_3.addLayout(self.horizontalLayout_2) + MainWindow.setCentralWidget(self.centralwidget) + self.menubar = QtWidgets.QMenuBar(MainWindow) + self.menubar.setGeometry(QtCore.QRect(0, 0, 400, 22)) + self.menubar.setObjectName("menubar") + MainWindow.setMenuBar(self.menubar) + self.statusbar = QtWidgets.QStatusBar(MainWindow) + self.statusbar.setObjectName("statusbar") + MainWindow.setStatusBar(self.statusbar) + + self.retranslateUi(MainWindow) + self.tabWidget.setCurrentIndex(0) + QtCore.QMetaObject.connectSlotsByName(MainWindow) + + def retranslateUi(self, MainWindow): + _translate = QtCore.QCoreApplication.translate + MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) + self.label.setText( + _translate( + "MainWindow", + ( + "SecureDrop workstation updates are available! You should install them now. " + "If you don't want to, you can install them the next time your system boots." + ), + ) + ) + self.tabWidget.setTabText( + self.tabWidget.indexOf(self.tab), _translate("MainWindow", "SecureDrop") + ) + self.tabWidget.setTabText( + self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Command Output") + ) + self.pushButton.setText(_translate("MainWindow", "Install Later")) + self.pushButton_2.setText(_translate("MainWindow", "Install Now")) diff --git a/testinfra/conftest.py b/molecule/testinfra/staging/conftest.py similarity index 57% rename from testinfra/conftest.py rename to molecule/testinfra/staging/conftest.py --- a/testinfra/conftest.py +++ b/molecule/testinfra/staging/conftest.py @@ -6,12 +6,14 @@ Vars should be placed in `testinfra/vars/<hostname>.yml`. """ +import io import os import yaml -target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] -assert target_host != "" +# The config tests target staging by default. It's possible to override +# for e.g. prod, but the associated vars files are not yet ported. +target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging') def securedrop_import_testinfra_vars(hostname, with_header=False): @@ -23,18 +25,26 @@ def securedrop_import_testinfra_vars(hostname, with_header=False): Vars must be stored in `testinfra/vars/<hostname>.yml`. 
""" filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml") - with open(filepath, 'r') as f: + with io.open(filepath, 'r') as f: hostvars = yaml.safe_load(f) - # The directory Travis runs builds in varies by PR, so we cannot hardcode - # it in the YAML testvars. Read it from env var and concatenate. - if hostname.lower() == 'travis': - build_env = os.environ["TRAVIS_BUILD_DIR"] - hostvars['securedrop_code'] = build_env+"/securedrop" if with_header: hostvars = dict(securedrop_test_vars=hostvars) + return hostvars +def lookup_molecule_info(): + """ + Molecule automatically writes YAML files documenting dynamic host info + such as remote IPs. Read that file and pass back the config dict. + """ + molecule_instance_config_path = os.path.abspath( + os.environ['MOLECULE_INSTANCE_CONFIG']) + with open(molecule_instance_config_path, 'r') as f: + molecule_instance_config = yaml.safe_load(f) + return molecule_instance_config + + def pytest_namespace(): return securedrop_import_testinfra_vars(target_host, with_header=True) diff --git a/molecule/vagrant-packager/package.py b/molecule/vagrant-packager/package.py new file mode 100755 --- /dev/null +++ b/molecule/vagrant-packager/package.py @@ -0,0 +1,260 @@ +#!/usr/bin/env python3 +# +# +# +# +import hashlib +import json +import os +from os.path import join +import re +import shutil +import subprocess +import tarfile +import xml.etree.ElementTree as ET + + +SCENARIO_NAME = os.path.basename(os.path.dirname(os.path.abspath(__file__))) +SCENARIO_PATH = os.path.dirname(os.path.realpath(__file__)) +BOX_BUILD_DIR = join(SCENARIO_PATH, "build") +BOX_METADATA_DIR = join(SCENARIO_PATH, "box_files") +EPHEMERAL_DIRS = {} +TARGET_VERSION_FILE = os.path.join(SCENARIO_PATH, os.path.pardir, "shared", "stable.ver") + + +class LibVirtPackager(object): + + def __init__(self, vm): + # type: (str) -> None + self.cli_prefix = "virsh --connect qemu:///system {}" + self.vm_name = vm + + def _get_virsh_xml(self, cmd): + # type: (str) -> ET.Element + virsh_cmd_str = self.cli_prefix.format(cmd) + cmd_output = subprocess.check_output(virsh_cmd_str.split()) + return ET.fromstring(cmd_output) + + def vm_xml(self): + # type: () -> ET.Element + """ Get XML definition for virtual machine domain + """ + return self._get_virsh_xml("dumpxml "+self.vm_name) + + def default_image_location(self): + # type: () -> str + """ + Get full system path to the default system pool dir + """ + pool = self._get_virsh_xml("pool-dumpxml default") + + return pool.findall('./target/path')[0].text + + def image_rebase(self, img_location): + # type: (str) -> None + """ If an image has a backing store, merge the backing store into the + target image file + """ + if self.default_image_location() in img_location: + raise UserWarning("To prevent catastrophy, will not" + " run on image in the default pool. Copy it" + " first to someplace else") + + img_info = subprocess.check_output(["qemu-img", "info", img_location]) + rebase_cmd = """qemu-img rebase -b "" {}""".format(img_location) + + if "backing file:" in img_info.decode('utf-8'): + print("Running rebase now..") + subprocess.check_call(rebase_cmd, shell=True) + else: + print("No rebase necessary") + + def image_store_path(self): + # type: () -> str + """ Get location of VM's first storage disk file """ + vm_xml = self.vm_xml() + return vm_xml.findall('./devices/disk/source')[0].attrib['file'] + + def image_sparsify(self, src, dest, tmp_dir, inplace): + # type: (str, str, str, bool) -> None + """ Remove blank-space from the image. 
+    def image_sparsify(self, src, dest, tmp_dir, inplace):
+        # type: (str, str, str, bool) -> None
+        """ Remove blank space from the image. Note that setting inplace to
+            False frees more space, but the heavy IO involved can make the
+            system unstable; it has maxed out IO and crashed a build machine
+            before.
+        """
+        img_info = subprocess.check_output(["qemu-img", "info", src])
+
+        if "backing file:" in img_info.decode('utf-8'):
+            raise UserWarning("Cannot sparsify image w/ backing "
+                              "store. Please rebase first.")
+
+        if inplace:
+            subprocess.check_call(["virt-sparsify",
+                                   "--in-place",
+                                   src])
+            shutil.move(src, dest)
+        else:
+            subprocess.check_call(["virt-sparsify", "--tmp",
+                                   tmp_dir,
+                                   src,
+                                   dest])
+
+    def sysprep(self, img_location):
+        # type: (str) -> None
+        """ Run the virt-sysprep tool over the image to prep it for
+            re-distribution. Removes things like logs and user history files.
+        """
+        sysprep_cmd = ("virt-sysprep --no-logfile --operations "
+                       "defaults,-ssh-userdir,-ssh-hostkeys,-logfiles -a " +
+                       img_location)
+        subprocess.check_call(sysprep_cmd.split())
+
+    def vagrant_metadata(self, img_location):
+        # type: (str) -> dict
+        """ Produce dictionary of necessary vagrant key/values """
+        metadata = {}
+
+        info_output = subprocess.check_output(["qemu-img", "info",
+                                               img_location]).decode('utf-8')
+        metadata['virtual_size'] = int((re.search(r"virtual size: (?P<size>\d+)G",
+                                                  info_output)).group("size"))
+
+        metadata['format'] = (re.search(r"file format: (?P<format>\w+)",
+                                        info_output)).group("format")
+        metadata['provider'] = 'libvirt'
+
+        return metadata
+
+
+def main():
+    with open(TARGET_VERSION_FILE, 'r') as f:
+        TARGET_VERSION = f.read().strip()
+
+    # Default to Xenial as base OS.
+    TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM", "xenial")
+
+    for srv in ["app-staging", "mon-staging"]:
+
+        for temp_dir in ["build", "tmp"]:
+            try:
+                ephemeral_path = join(SCENARIO_PATH, ".molecule",
+                                      temp_dir)
+                EPHEMERAL_DIRS[temp_dir] = ephemeral_path
+
+                os.makedirs(os.path.join(SCENARIO_PATH, ".molecule", temp_dir))
+            except OSError:
+                pass
+
+        vm = LibVirtPackager("{}_{}".format(SCENARIO_NAME, srv))
+
+        tmp_img_file = join(EPHEMERAL_DIRS["tmp"], "wip.img")
+        packaged_img_file = join(EPHEMERAL_DIRS["build"], "box.img")
+
+        print("Copying VM image store locally")
+        subprocess.check_output(["sudo", "cp",
+                                 vm.image_store_path(),  # source
+                                 tmp_img_file  # dest
+                                 ])
+
+        print("Changing file ownership")
+        subprocess.check_output(["sudo", "chown", os.environ['USER'],
+                                 tmp_img_file])
+
+        # Run a sysprep on it
+        print("Running an image sysprep")
+        vm.sysprep(tmp_img_file)
+
+        print("Rebasing image")
+        vm.image_rebase(tmp_img_file)
+
+        # Sparsify the image file
+        print("Sparsifying the image")
+        vm.image_sparsify(src=tmp_img_file,
+                          dest=packaged_img_file,
+                          tmp_dir=EPHEMERAL_DIRS['tmp'],
+                          inplace=True)
+
+        # Write out metadata file
+        with open(join(EPHEMERAL_DIRS['build'], 'metadata.json'),
+                  'w') as mdata:
+            json.dump(
+                vm.vagrant_metadata(packaged_img_file),
+                mdata)
+
+        # Copy in appropriate vagrant file to build dir
+        shutil.copyfile(join(BOX_METADATA_DIR, "Vagrantfile."+srv),
+                        join(EPHEMERAL_DIRS['build'], 'Vagrantfile'))
+
+        print("Creating tar file")
+        box_file = join(BOX_BUILD_DIR, "{}-{}_{}.box".format(srv, TARGET_PLATFORM, TARGET_VERSION))
+        with tarfile.open(box_file, "w|gz") as tar:
+            for boxfile in ["box.img", "Vagrantfile", "metadata.json"]:
+                tar.add(join(EPHEMERAL_DIRS["build"], boxfile),
+                        arcname=boxfile)
+
+        print("Box created at {}".format(box_file))
+
+        print("Updating box metadata")
+        update_box_metadata(srv, box_file, TARGET_PLATFORM, TARGET_VERSION)
+
+        print("Cleaning up tmp space")
+        
shutil.rmtree(EPHEMERAL_DIRS['tmp']) + + +def sha256_checksum(filepath): + """ + Returns a SHA256 checksum for a given filepath. + """ + checksum = hashlib.sha256() + with open(filepath, 'rb') as f: + # Read by chunks, to avoid slurping the entire file into memory. + # Box files range from 500MB to 1.5GB. + for block in iter(lambda: f.read(checksum.block_size), b''): + checksum.update(block) + return checksum.hexdigest() + + +def update_box_metadata(server_name, box_file, platform, version): + """ + Updates the JSON file of Vagrant box metadata, including remote URL, + version number, and SHA256 checksum. + """ + # Strip off "staging" suffix from box names + server_name_short = re.sub(r'\-staging$', '', server_name) + json_file_basename = "{}_{}_metadata.json".format(server_name_short, platform) + json_file = os.path.join(BOX_METADATA_DIR, json_file_basename) + + # Read in current JSON metadata, so we can append the new info to it. + with open(json_file, "r") as f: + metadata_config = json.loads(f.read()) + + base_url = "https://dev-bin.ops.securedrop.org/vagrant" + box_name = os.path.basename(box_file) + box_url = "{}/{}".format(base_url, box_name) + box_checksum = sha256_checksum(box_file) + box_config = dict( + name="libvirt", + url=box_url, + checksum_type="sha256", + checksum=box_checksum, + ) + # Creating list of dicts to adhere to JSON format of Vagrant box metadata + providers_list = [] + providers_list.append(box_config) + version_config = dict( + version=version, + providers=providers_list, + ) + box_versions = metadata_config['versions'] + box_versions.append(version_config) + metadata_config['versions'] = box_versions + + # Write out final, modified data. Does not validate for uniqueness, + # so repeated runs on the same version will duplicate version info, + # which'll likely break the box fetching. Target file is version-controlled, + # though, so easy enough to correct in the event of a mistake. + with open(json_file, "w") as f: + f.write(json.dumps(metadata_config, indent=2, sort_keys=True)) + + +if __name__ == "__main__": + main() diff --git a/molecule/vagrant-packager/scripts/list_bucket.py b/molecule/vagrant-packager/scripts/list_bucket.py new file mode 100755 --- /dev/null +++ b/molecule/vagrant-packager/scripts/list_bucket.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# +# +# +# +# Generate index.html of vagrant box files in our s3 bucket +# and upload said file. 
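+#
+# Rough usage sketch (assumes AWS credentials are already available to
+# boto3 through its usual environment variables or config files; the
+# values shown are the script's own defaults):
+#
+#   BUCKET=dev-bin.ops.securedrop.org BUCKET_PATH=vagrant ./list_bucket.py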
+
+import boto3
+import os
+
+
+class S3_Bucket_IndexCreator(object):
+    """ Class to initialize s3 bucket connection, grab contents, publish index """
+
+    def __init__(self, bucket, path):
+        self.s3 = boto3.resource('s3')
+        self.vagrant_bucket = self.s3.Bucket(name=bucket)
+        self.bucket = bucket
+        self.path = path
+
+    def bucket_get_list(self):
+        """ Get the bucket file listing and return it as a Python list """
+
+        return [obj.key.split('/')[-1] for obj in
+                self.vagrant_bucket.objects.filter(Prefix=self.path) if
+                "index.html" not in obj.key]
+
+    def generate_html_index(self):
+        """Build a simple HTML index string from bucket listings"""
+
+        str_files = ["""<a href="{file}">{file}</a>""".format(file=f) for f in
+                     self.bucket_get_list()]
+
+        index_string = """
+        <html><head><title>Index of /{path}/</title></head><body bgcolor="white">
+        <h1>Index of /{path}/</h1><hr><pre>{files}</pre><hr></body></html>""".format(
+            path=self.path,
+            files="<br>".join(str_files)
+            )
+
+        return index_string
+
+    def upload_string_as_file(self,
+                              contents,
+                              filename="index.html",
+                              content_type="text/html"):
+
+        """ Upload the given string to the bucket as a file with the given
+            name and content type """
+        object = self.s3.Object(self.bucket, '{}/{}'.format(self.path, filename))
+        object.put(Body=contents,
+                   ContentType=content_type)
+
+
+if __name__ == "__main__":
+    BUCKET = os.environ.get('BUCKET', 'dev-bin.ops.securedrop.org')
+    BUCKET_PATH = os.environ.get('BUCKET_PATH', 'vagrant/')
+
+    bucket_index_creation = S3_Bucket_IndexCreator(BUCKET, BUCKET_PATH)
+    index = bucket_index_creation.generate_html_index()
+    bucket_index_creation.upload_string_as_file(index)
diff --git a/securedrop/alembic/env.py b/securedrop/alembic/env.py
new file mode 100644
--- /dev/null
+++ b/securedrop/alembic/env.py
@@ -0,0 +1,82 @@
+from __future__ import with_statement
+
+import os
+import sys
+
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+from os import path
+
+config = context.config
+
+fileConfig(config.config_file_name)
+
+# needed to import local modules
+sys.path.insert(0, path.realpath(path.join(path.dirname(__file__), '..')))
+from db import db  # noqa
+
+try:
+    # These imports are only needed for offline generation of automigrations.
+    # Importing them in a prod-like environment breaks things.
+    from journalist_app import create_app  # noqa
+    from sdconfig import config as sdconfig  # noqa
+
+    # App context is needed for autogenerated migrations
+    create_app(sdconfig).app_context().push()
+except Exception:
+    # Only reraise the exception in 'dev' where a developer actually cares
+    if os.environ.get('SECUREDROP_ENV') == 'dev':
+        raise
+
+
+target_metadata = db.Model.metadata
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
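+
+    render_as_batch=True is passed below so that autogenerated migrations
+    wrap ALTER operations in batch_alter_table() blocks, which SQLite needs
+    because its native ALTER TABLE support is limited.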
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/securedrop/alembic/versions/15ac9509fc68_init.py b/securedrop/alembic/versions/15ac9509fc68_init.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/15ac9509fc68_init.py @@ -0,0 +1,93 @@ +"""init + +Revision ID: 15ac9509fc68 +Revises: +Create Date: 2018-03-30 21:20:58.280753 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "15ac9509fc68" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), + ) + op.create_table( + "sources", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("filesystem_id", sa.String(length=96), nullable=True), + sa.Column("journalist_designation", sa.String(length=255), nullable=False), + sa.Column("flagged", sa.Boolean(), nullable=True), + sa.Column("last_updated", sa.DateTime(), nullable=True), + sa.Column("pending", sa.Boolean(), nullable=True), + sa.Column("interaction_count", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("filesystem_id"), + ) + op.create_table( + "journalist_login_attempt", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("timestamp", sa.DateTime(), nullable=True), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "source_stars", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("starred", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "submissions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", 
sa.Integer(), nullable=False), + sa.Column("downloaded", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + ) + + +def downgrade(): + op.drop_table("submissions") + op.drop_table("source_stars") + op.drop_table("replies") + op.drop_table("journalist_login_attempt") + op.drop_table("sources") + op.drop_table("journalists") diff --git a/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py @@ -0,0 +1,56 @@ +"""added passphrase_hash column to journalists table + +Revision ID: 2d0ce3ee5bdc +Revises: fccf57ceef02 +Create Date: 2018-06-08 15:08:37.718268 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "2d0ce3ee5bdc" +down_revision = "fccf57ceef02" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column("journalists", sa.Column("passphrase_hash", sa.String(length=256), nullable=True)) + + +def downgrade(): + # sqlite has no `drop column` command, so we recreate the original table + # then load it from a temp table + + op.rename_table("journalists", "journalists_tmp") + + op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), + ) + + conn = op.get_bind() + conn.execute( + """ + INSERT INTO journalists + SELECT id, username, pw_salt, pw_hash, is_admin, otp_secret, is_totp, + hotp_counter, last_token, created_on, last_access + FROM journalists_tmp + """ + ) + + op.drop_table("journalists_tmp") diff --git a/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/3d91d6948753_create_source_uuid_column.py @@ -0,0 +1,71 @@ +"""Create source UUID column + +Revision ID: 3d91d6948753 +Revises: faac8092c123 +Create Date: 2018-07-09 22:39:05.088008 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import quoted_name +import uuid + +# revision identifiers, used by Alembic. +revision = "3d91d6948753" +down_revision = "faac8092c123" +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table("sources", "sources_tmp") + + # Add UUID column. + op.add_column("sources_tmp", sa.Column("uuid", sa.String(length=36))) + + # Add UUIDs to sources_tmp table. 
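+    # (The UPDATE below passes values via bindparams() rather than
+    # interpolating them into the SQL string, leaving quoting to the driver.)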
+ conn = op.get_bind() + sources = conn.execute(sa.text("SELECT * FROM sources_tmp")).fetchall() + + for source in sources: + conn.execute( + sa.text( + """UPDATE sources_tmp SET uuid=:source_uuid WHERE + id=:id""" + ).bindparams(source_uuid=str(uuid.uuid4()), id=source.id) + ) + + # Now create new table with unique constraint applied. + op.create_table( + quoted_name("sources", quote=False), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("filesystem_id", sa.String(length=96), nullable=True), + sa.Column("journalist_designation", sa.String(length=255), nullable=False), + sa.Column("flagged", sa.Boolean(), nullable=True), + sa.Column("last_updated", sa.DateTime(), nullable=True), + sa.Column("pending", sa.Boolean(), nullable=True), + sa.Column("interaction_count", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + sa.UniqueConstraint("filesystem_id"), + ) + + # Data Migration: move all sources into the new table. + conn.execute( + """ + INSERT INTO sources + SELECT id, uuid, filesystem_id, journalist_designation, flagged, + last_updated, pending, interaction_count + FROM sources_tmp + """ + ) + + # Now delete the old table. + op.drop_table("sources_tmp") + + +def downgrade(): + with op.batch_alter_table("sources", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/3da3fcab826a_delete_orphaned_submissions.py @@ -0,0 +1,105 @@ +"""delete orphaned submissions and replies + +Ref: https://github.com/freedomofpress/securedrop/issues/1189 + +Revision ID: 3da3fcab826a +Revises: 60f41bb14d98 +Create Date: 2018-11-25 19:40:25.873292 + +""" +import os +from alembic import op +import sqlalchemy as sa + +# raise the errors if we're not in production +raise_errors = os.environ.get("SECUREDROP_ENV", "prod") != "prod" + +try: + from journalist_app import create_app + from sdconfig import config + from store import NoFileFoundException, TooManyFilesException +except ImportError: + # This is a fresh install, and config.py has not been created yet. + if raise_errors: + raise + +# revision identifiers, used by Alembic. 
+revision = '3da3fcab826a' +down_revision = '60f41bb14d98' +branch_labels = None +depends_on = None + + +def raw_sql_grab_orphaned_objects(table_name): + """Objects that have a source ID that doesn't exist in the + sources table OR a NULL source ID should be deleted.""" + return ('SELECT id, filename, source_id FROM {table} ' # nosec + 'WHERE source_id NOT IN (SELECT id FROM sources) ' + 'UNION SELECT id, filename, source_id FROM {table} ' # nosec + 'WHERE source_id IS NULL').format(table=table_name) + + +def upgrade(): + conn = op.get_bind() + submissions = conn.execute( + sa.text(raw_sql_grab_orphaned_objects('submissions')) + ).fetchall() + + replies = conn.execute( + sa.text(raw_sql_grab_orphaned_objects('replies')) + ).fetchall() + + try: + app = create_app(config) + with app.app_context(): + for submission in submissions: + try: + conn.execute( + sa.text(""" + DELETE FROM submissions + WHERE id=:id + """).bindparams(id=submission.id) + ) + + path = app.storage.path_without_filesystem_id(submission.filename) + app.storage.move_to_shredder(path) + except NoFileFoundException: + # The file must have been deleted by the admin, remove the row + conn.execute( + sa.text(""" + DELETE FROM submissions + WHERE id=:id + """).bindparams(id=submission.id) + ) + except TooManyFilesException: + pass + + for reply in replies: + try: + conn.execute( + sa.text(""" + DELETE FROM replies + WHERE id=:id + """).bindparams(id=reply.id) + ) + + path = app.storage.path_without_filesystem_id(reply.filename) + app.storage.move_to_shredder(path) + except NoFileFoundException: + # The file must have been deleted by the admin, remove the row + conn.execute( + sa.text(""" + DELETE FROM replies + WHERE id=:id + """).bindparams(id=reply.id) + ) + except TooManyFilesException: + pass + except: # noqa + if raise_errors: + raise + + +def downgrade(): + # This is a destructive alembic migration, it cannot be downgraded + pass diff --git a/securedrop/alembic/versions/523fff3f969c_add_versioned_instance_config.py b/securedrop/alembic/versions/523fff3f969c_add_versioned_instance_config.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/523fff3f969c_add_versioned_instance_config.py @@ -0,0 +1,41 @@ +"""add versioned instance config + +Revision ID: 523fff3f969c +Revises: 3da3fcab826a +Create Date: 2019-11-02 23:06:12.161868 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '523fff3f969c' +down_revision = '3da3fcab826a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('instance_config', + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('valid_until', sa.DateTime(), nullable=True), + sa.Column('allow_document_uploads', sa.Boolean(), nullable=True), + + sa.PrimaryKeyConstraint('version'), + sa.UniqueConstraint('valid_until'), + ) + # ### end Alembic commands ### + + # Data migration: Since allow_document_uploads is the first + # instance_config setting (column), all we have to do is insert a + # row with its default value. + conn = op.get_bind() + conn.execute("""INSERT INTO instance_config (allow_document_uploads) VALUES (1)""") + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('instance_config') + # ### end Alembic commands ### diff --git a/securedrop/alembic/versions/60f41bb14d98_add_session_nonce_to_journalist.py b/securedrop/alembic/versions/60f41bb14d98_add_session_nonce_to_journalist.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/60f41bb14d98_add_session_nonce_to_journalist.py @@ -0,0 +1,78 @@ +"""Add Session Nonce To Journalist + +Revision ID: 60f41bb14d98 +Revises: a9fe328b053a +Create Date: 2019-08-19 04:20:59.489516 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '60f41bb14d98' +down_revision = 'a9fe328b053a' +branch_labels = None +depends_on = None + + +def upgrade(): + # Save existing journalist table. + op.rename_table('journalists', 'journalists_tmp') + + # Add nonce column. + op.add_column('journalists_tmp', sa.Column('session_nonce', sa.Integer())) + + # Populate nonce column. + conn = op.get_bind() + journalists = conn.execute( + sa.text("SELECT * FROM journalists_tmp")).fetchall() + + for journalist in journalists: + conn.execute( + sa.text("""UPDATE journalists_tmp SET session_nonce=0 WHERE + id=:id""").bindparams(id=journalist.id) + ) + + # Now create new table with null constraint applied. + op.create_table('journalists', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('first_name', sa.String(length=255), nullable=True), + sa.Column('last_name', sa.String(length=255), nullable=True), + sa.Column('pw_salt', sa.Binary(), nullable=True), + sa.Column('pw_hash', sa.Binary(), nullable=True), + sa.Column('passphrase_hash', sa.String(length=256), nullable=True), + sa.Column('is_admin', sa.Boolean(), nullable=True), + sa.Column('session_nonce', sa.Integer(), nullable=False), + sa.Column('otp_secret', sa.String(length=16), nullable=True), + sa.Column('is_totp', sa.Boolean(), nullable=True), + sa.Column('hotp_counter', sa.Integer(), nullable=True), + sa.Column('last_token', sa.String(length=6), nullable=True), + sa.Column('created_on', sa.DateTime(), nullable=True), + sa.Column('last_access', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username'), + sa.UniqueConstraint('uuid') + ) + + conn = op.get_bind() + conn.execute(''' + INSERT INTO journalists + SELECT id, uuid, username, first_name, last_name, pw_salt, pw_hash, + passphrase_hash, is_admin, session_nonce, otp_secret, is_totp, + hotp_counter, last_token, created_on, last_access + FROM journalists_tmp + ''') + + # Now delete the old table. + op.drop_table('journalists_tmp') + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.drop_column('session_nonce') + + # ### end Alembic commands ### diff --git a/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/6db892e17271_add_reply_uuid.py @@ -0,0 +1,73 @@ +"""add reply UUID + +Revision ID: 6db892e17271 +Revises: e0a525cbab83 +Create Date: 2018-08-06 20:31:50.035066 + +""" +from alembic import op +import sqlalchemy as sa + +import uuid + +# revision identifiers, used by Alembic. 
+revision = "6db892e17271" +down_revision = "e0a525cbab83" +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table("replies", "replies_tmp") + + # Add new column. + op.add_column("replies_tmp", sa.Column("uuid", sa.String(length=36))) + + # Populate new column in replies_tmp table. + conn = op.get_bind() + replies = conn.execute(sa.text("SELECT * FROM replies_tmp")).fetchall() + + for reply in replies: + conn.execute( + sa.text( + """UPDATE replies_tmp SET uuid=:reply_uuid WHERE + id=:id""" + ).bindparams(reply_uuid=str(uuid.uuid4()), id=reply.id) + ) + + # Now create new table with constraints applied to UUID column. + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("deleted_by_source", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + ) + + # Data Migration: move all replies into the new table. + conn.execute( + """ + INSERT INTO replies + SELECT id, uuid, journalist_id, source_id, filename, size, + deleted_by_source + FROM replies_tmp + """ + ) + + # Now delete the old table. + op.drop_table("replies_tmp") + + +def downgrade(): + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("uuid") + + # ### end Alembic commands ### diff --git a/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py b/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/a9fe328b053a_migrations_for_0_14_0.py @@ -0,0 +1,28 @@ +"""Migrations for SecureDrop's 0.14.0 release + +Revision ID: a9fe328b053a +Revises: b58139cfdc8c +Create Date: 2019-05-21 20:23:30.005632 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'a9fe328b053a' +down_revision = 'b58139cfdc8c' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.add_column(sa.Column('first_name', sa.String(length=255), nullable=True)) + batch_op.add_column(sa.Column('last_name', sa.String(length=255), nullable=True)) + + +def downgrade(): + with op.batch_alter_table('journalists', schema=None) as batch_op: + batch_op.drop_column('last_name') + batch_op.drop_column('first_name') diff --git a/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/b58139cfdc8c_add_checksum_columns_revoke_table.py @@ -0,0 +1,98 @@ +"""add checksum columns and revoke token table +Revision ID: b58139cfdc8c +Revises: f2833ac34bb6 +Create Date: 2019-04-02 10:45:05.178481 +""" +import os +from alembic import op +import sqlalchemy as sa + +# raise the errors if we're not in production +raise_errors = os.environ.get("SECUREDROP_ENV", "prod") != "prod" + +try: + from journalist_app import create_app + from models import Submission, Reply + from sdconfig import config + from store import queued_add_checksum_for_file + from worker import create_queue +except: # noqa + if raise_errors: + raise + +# revision identifiers, used by Alembic. +revision = "b58139cfdc8c" +down_revision = "f2833ac34bb6" +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.add_column(sa.Column("checksum", sa.String(length=255), nullable=True)) + + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.add_column(sa.Column("checksum", sa.String(length=255), nullable=True)) + + op.create_table( + "revoked_tokens", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("token", sa.Text(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("token"), + ) + + try: + app = create_app(config) + + # we need an app context for the rq worker extension to work properly + with app.app_context(): + conn = op.get_bind() + query = sa.text( + """SELECT submissions.id, sources.filesystem_id, submissions.filename + FROM submissions + INNER JOIN sources + ON submissions.source_id = sources.id + """ + ) + for (sub_id, filesystem_id, filename) in conn.execute(query): + full_path = app.storage.path(filesystem_id, filename) + create_queue().enqueue( + queued_add_checksum_for_file, + Submission, + int(sub_id), + full_path, + app.config["SQLALCHEMY_DATABASE_URI"], + ) + + query = sa.text( + """SELECT replies.id, sources.filesystem_id, replies.filename + FROM replies + INNER JOIN sources + ON replies.source_id = sources.id + """ + ) + for (rep_id, filesystem_id, filename) in conn.execute(query): + full_path = app.storage.path(filesystem_id, filename) + create_queue().enqueue( + queued_add_checksum_for_file, + Reply, + int(rep_id), + full_path, + app.config["SQLALCHEMY_DATABASE_URI"], + ) + except: # noqa + if raise_errors: + raise + + +def downgrade(): + op.drop_table("revoked_tokens") + + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.drop_column("checksum") + + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("checksum") diff --git 
a/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/e0a525cbab83_add_column_to_track_source_deletion_of_.py @@ -0,0 +1,68 @@ +"""add column to track source deletion of replies + +Revision ID: e0a525cbab83 +Revises: 2d0ce3ee5bdc +Create Date: 2018-08-02 00:07:59.242510 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "e0a525cbab83" +down_revision = "2d0ce3ee5bdc" +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table("replies", "replies_tmp") + + # Add new column. + op.add_column("replies_tmp", sa.Column("deleted_by_source", sa.Boolean())) + + # Populate deleted_by_source column in replies_tmp table. + conn = op.get_bind() + replies = conn.execute(sa.text("SELECT * FROM replies_tmp")).fetchall() + + for reply in replies: + conn.execute( + sa.text( + """UPDATE replies_tmp SET deleted_by_source=0 WHERE + id=:id""" + ).bindparams(id=reply.id) + ) + + # Now create new table with not null constraint applied to + # deleted_by_source. + op.create_table( + "replies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("journalist_id", sa.Integer(), nullable=True), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("deleted_by_source", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + ) + + # Data Migration: move all replies into the new table. + conn.execute( + """ + INSERT INTO replies + SELECT id, journalist_id, source_id, filename, size, deleted_by_source + FROM replies_tmp + """ + ) + + # Now delete the old table. + op.drop_table("replies_tmp") + + +def downgrade(): + with op.batch_alter_table("replies", schema=None) as batch_op: + batch_op.drop_column("deleted_by_source") diff --git a/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/f2833ac34bb6_add_uuid_column_for_users_table.py @@ -0,0 +1,77 @@ +"""add UUID column for users table + +Revision ID: f2833ac34bb6 +Revises: 6db892e17271 +Create Date: 2018-08-13 18:10:19.914274 + +""" +from alembic import op +import sqlalchemy as sa +import uuid + + +# revision identifiers, used by Alembic. +revision = "f2833ac34bb6" +down_revision = "6db892e17271" +branch_labels = None +depends_on = None + + +def upgrade(): + # Save existing journalist table. + op.rename_table("journalists", "journalists_tmp") + + # Add UUID column. + op.add_column("journalists_tmp", sa.Column("uuid", sa.String(length=36))) + + # Add UUIDs to journalists_tmp table. + conn = op.get_bind() + journalists = conn.execute(sa.text("SELECT * FROM journalists_tmp")).fetchall() + + for journalist in journalists: + conn.execute( + sa.text( + """UPDATE journalists_tmp SET uuid=:journalist_uuid WHERE + id=:id""" + ).bindparams(journalist_uuid=str(uuid.uuid4()), id=journalist.id) + ) + + # Now create new table with unique constraint applied. 
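+    # SQLite's ALTER TABLE cannot add a UNIQUE constraint to an existing
+    # table, hence the rename / re-create / copy / drop sequence used here.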
+ op.create_table( + "journalists", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("username", sa.String(length=255), nullable=False), + sa.Column("pw_salt", sa.Binary(), nullable=True), + sa.Column("pw_hash", sa.Binary(), nullable=True), + sa.Column("passphrase_hash", sa.String(length=256), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column("otp_secret", sa.String(length=16), nullable=True), + sa.Column("is_totp", sa.Boolean(), nullable=True), + sa.Column("hotp_counter", sa.Integer(), nullable=True), + sa.Column("last_token", sa.String(length=6), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("last_access", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), + sa.UniqueConstraint("uuid"), + ) + + conn = op.get_bind() + conn.execute( + """ + INSERT INTO journalists + SELECT id, uuid, username, pw_salt, pw_hash, passphrase_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access + FROM journalists_tmp + """ + ) + + # Now delete the old table. + op.drop_table("journalists_tmp") + + +def downgrade(): + with op.batch_alter_table("journalists", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py b/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py @@ -0,0 +1,26 @@ +"""enable security pragmas + +Revision ID: faac8092c123 +Revises: 15ac9509fc68 +Create Date: 2018-03-31 10:44:26.533395 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'faac8092c123' +down_revision = '15ac9509fc68' +branch_labels = None +depends_on = None + + +def upgrade(): + conn = op.get_bind() + conn.execute(sa.text('PRAGMA secure_delete = ON')) + conn.execute(sa.text('PRAGMA auto_vacuum = FULL')) + + +def downgrade(): + pass diff --git a/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py @@ -0,0 +1,68 @@ +"""create submission uuid column + +Revision ID: fccf57ceef02 +Revises: 3d91d6948753 +Create Date: 2018-07-12 00:06:20.891213 + +""" +from alembic import op +import sqlalchemy as sa + +import uuid + +# revision identifiers, used by Alembic. +revision = "fccf57ceef02" +down_revision = "3d91d6948753" +branch_labels = None +depends_on = None + + +def upgrade(): + # Schema migration + op.rename_table("submissions", "submissions_tmp") + + # Add UUID column. + op.add_column("submissions_tmp", sa.Column("uuid", sa.String(length=36))) + + # Add UUIDs to submissions_tmp table. + conn = op.get_bind() + submissions = conn.execute(sa.text("SELECT * FROM submissions_tmp")).fetchall() + + for submission in submissions: + conn.execute( + sa.text( + """UPDATE submissions_tmp SET uuid=:submission_uuid WHERE + id=:id""" + ).bindparams(submission_uuid=str(uuid.uuid4()), id=submission.id) + ) + + # Now create new table with unique constraint applied. 
+ op.create_table( + "submissions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("filename", sa.String(length=255), nullable=False), + sa.Column("size", sa.Integer(), nullable=False), + sa.Column("downloaded", sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + ) + + # Data Migration: move all submissions into the new table. + conn.execute( + """ + INSERT INTO submissions + SELECT id, uuid, source_id, filename, size, downloaded + FROM submissions_tmp + """ + ) + + # Now delete the old table. + op.drop_table("submissions_tmp") + + +def downgrade(): + with op.batch_alter_table("submissions", schema=None) as batch_op: + batch_op.drop_column("uuid") diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py new file mode 100755 --- /dev/null +++ b/securedrop/create-dev-data.py @@ -0,0 +1,116 @@ +#!/opt/venvs/securedrop-app-code/bin/python +# -*- coding: utf-8 -*- + +import datetime +import os +import argparse + +from flask import current_app +from sqlalchemy.exc import IntegrityError + +os.environ["SECUREDROP_ENV"] = "dev" # noqa +import journalist_app + +from sdconfig import config +from db import db +from models import Journalist, Reply, Source, Submission + + +def main(staging=False): + app = journalist_app.create_app(config) + with app.app_context(): + # Add two test users + test_password = "correct horse battery staple profanity oil chewy" + test_otp_secret = "JHCOGO7VCER3EJ4L" + + add_test_user("journalist", + test_password, + test_otp_secret, + is_admin=True) + + if staging: + return + + add_test_user("dellsberg", + test_password, + test_otp_secret, + is_admin=False) + + # Add test sources and submissions + num_sources = int(os.getenv('NUM_SOURCES', 2)) + for _ in range(num_sources): + create_source_and_submissions() + + +def add_test_user(username, password, otp_secret, is_admin=False): + try: + user = Journalist(username=username, + password=password, + is_admin=is_admin) + user.otp_secret = otp_secret + db.session.add(user) + db.session.commit() + print('Test user successfully added: ' + 'username={}, password={}, otp_secret={}, is_admin={}' + ''.format(username, password, otp_secret, is_admin)) + except IntegrityError: + print("Test user already added") + db.session.rollback() + + +def create_source_and_submissions(num_submissions=2, num_replies=2): + # Store source in database + codename = current_app.crypto_util.genrandomid() + filesystem_id = current_app.crypto_util.hash_codename(codename) + journalist_designation = current_app.crypto_util.display_id() + source = Source(filesystem_id, journalist_designation) + source.pending = False + db.session.add(source) + db.session.commit() + + # Generate submissions directory and generate source key + os.mkdir(current_app.storage.path(source.filesystem_id)) + current_app.crypto_util.genkeypair(source.filesystem_id, codename) + + # Generate some test submissions + for _ in range(num_submissions): + source.interaction_count += 1 + fpath = current_app.storage.save_message_submission( + source.filesystem_id, + source.interaction_count, + source.journalist_filename, + 'test submission!' 
+ ) + source.last_updated = datetime.datetime.utcnow() + submission = Submission(source, fpath) + db.session.add(submission) + + # Generate some test replies + for _ in range(num_replies): + source.interaction_count += 1 + fname = "{}-{}-reply.gpg".format(source.interaction_count, + source.journalist_filename) + current_app.crypto_util.encrypt( + 'this is a test reply!', + [current_app.crypto_util.getkey(source.filesystem_id), + config.JOURNALIST_KEY], + current_app.storage.path(source.filesystem_id, fname)) + + journalist = Journalist.query.first() + reply = Reply(journalist, source, fname) + db.session.add(reply) + + db.session.commit() + + print("Test source (codename: '{}', journalist designation '{}') " + "added with {} submissions and {} replies".format( + codename, journalist_designation, num_submissions, num_replies)) + + +if __name__ == "__main__": # pragma: no cover + parser = argparse.ArgumentParser() + parser.add_argument("--staging", help="Adding user for staging tests.", + action="store_true") + args = parser.parse_args() + + main(args.staging) diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -1,63 +1,323 @@ -#!/usr/bin/env python # -*- coding: utf-8 -*- -from base64 import b32encode +import collections +from distutils.version import StrictVersion +import pretty_bad_protocol as gnupg import os -import subprocess - -from Cryptodome.Random import random -import gnupg -from gnupg._util import _is_stream, _make_binary_stream +import io import scrypt +from random import SystemRandom + +from base64 import b32encode +from datetime import date +from flask import current_app +from pretty_bad_protocol._util import _is_stream, _make_binary_stream + +import rm + +import typing +# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking +if typing.TYPE_CHECKING: + # flake8 can not understand type annotation yet. + # That is why all type annotation relative import + # statements has to be marked as noqa. + # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401stream + from typing import Dict, List, Text # noqa: F401 -import config -import store # to fix gpg error #78 on production os.environ['USERNAME'] = 'www-data' -GPG_KEY_TYPE = "RSA" -if os.environ.get('SECUREDROP_ENV') == 'test': - # Optimize crypto to speed up tests (at the expense of security - DO NOT - # use these settings in production) - GPG_KEY_LENGTH = 1024 - SCRYPT_PARAMS = dict(N=2**1, r=1, p=1) -else: # pragma: no cover - GPG_KEY_LENGTH = 4096 - SCRYPT_PARAMS = config.SCRYPT_PARAMS +# SystemRandom sources from the system rand (e.g. urandom, CryptGenRandom, etc) +# It supplies a CSPRNG but with an interface that supports methods like choice +random = SystemRandom() + +# safe characters for every possible word in the wordlist includes capital +# letters because codename hashes are base32-encoded with capital letters +DICEWARE_SAFE_CHARS = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzA' + 'BCDEFGHIJKLMNOPQRSTUVWXYZ') -SCRYPT_ID_PEPPER = config.SCRYPT_ID_PEPPER -SCRYPT_GPG_PEPPER = config.SCRYPT_GPG_PEPPER -DEFAULT_WORDS_IN_RANDOM_ID = 8 +def monkey_patch_delete_handle_status(self, key, value): + """ + Parse a status code from the attached GnuPG process. + :raises: :exc:`~exceptions.ValueError` if the status message is unknown. 
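+
+    (KEY_CONSIDERED and PINENTRY_LAUNCHED appear to be status lines from
+    newer GnuPG releases that the upstream parser rejects; accepting them
+    here is what resolves the issue linked in the comment below.)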
+    """
+    if key in ("DELETE_PROBLEM", "KEY_CONSIDERED"):
+        self.status = self.problem_reason.get(value, "Unknown error: %r" % value)
+    elif key in ("PINENTRY_LAUNCHED"):
+        self.status = key.replace("_", " ").lower()
+    else:
+        raise ValueError("Unknown status message: %r" % key)
+
+# Monkey patching to resolve https://github.com/freedomofpress/securedrop/issues/4294
+gnupg._parsers.DeleteResult._handle_status = monkey_patch_delete_handle_status

-# Make sure these pass before the app can run
-# TODO: Add more tests
-def do_runtime_tests():
-    if config.SCRYPT_ID_PEPPER == config.SCRYPT_GPG_PEPPER:
-        raise AssertionError('SCRYPT_ID_PEPPER == SCRYPT_GPG_PEPPER')
-    # crash if we don't have srm:
-    try:
-        subprocess.check_call(['srm'], stdout=subprocess.PIPE)
-    except subprocess.CalledProcessError:
-        pass

+class FIFOCache():
+    """
+    A simple FIFO cache, used instead of functools.lru_cache because
+    lru_cache offers no way to remove a single item from its cache
+    (either FIFO or LRU replacement works for our key fingerprint cache).
+
+    See: https://bugs.python.org/issue28178
+    """
+    def __init__(self, maxsize: int):
+        self.maxsize = maxsize
+        self.cache = collections.OrderedDict()  # type: collections.OrderedDict

-do_runtime_tests()

+    def get(self, item):
+        if item in self.cache:
+            return self.cache[item]

-gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)

+    def put(self, item, value):
+        self.cache[item] = value
+        if len(self.cache) > self.maxsize:
+            self.cache.popitem(last=False)

-# map code for a given language to a localized wordlist
-language2words = {}
-nouns = open(config.NOUNS).read().rstrip('\n').split('\n')
-adjectives = open(config.ADJECTIVES).read().rstrip('\n').split('\n')

+    def delete(self, item):
+        del self.cache[item]


 class CryptoException(Exception):
     pass


+class CryptoUtil:
+
+    GPG_KEY_TYPE = "RSA"
+    DEFAULT_WORDS_IN_RANDOM_ID = 8
+
+    # All reply keypairs will be "created" on the same day SecureDrop (then
+    # Strongbox) was publicly released for the first time.
+    # https://www.newyorker.com/news/news-desk/strongbox-and-aaron-swartz
+    DEFAULT_KEY_CREATION_DATE = date(2013, 5, 14)
+
+    # '0' is the magic value that tells GPG's batch key generation not
+    # to set an expiration date.
+    DEFAULT_KEY_EXPIRATION_DATE = '0'
+
+    keycache_limit = 1000
+    keycache = FIFOCache(keycache_limit)
+
+    def __init__(self,
+                 scrypt_params,
+                 scrypt_id_pepper,
+                 scrypt_gpg_pepper,
+                 securedrop_root,
+                 word_list,
+                 nouns_file,
+                 adjectives_file,
+                 gpg_key_dir):
+        self.__securedrop_root = securedrop_root
+        self.__word_list = word_list
+
+        if os.environ.get('SECUREDROP_ENV') == 'test':
+            # Optimize crypto to speed up tests (at the expense of security -
+            # DO NOT use these settings in production)
+            self.__gpg_key_length = 1024
+            self.scrypt_params = dict(N=2**1, r=1, p=1)
+        else:  # pragma: no cover
+            self.__gpg_key_length = 4096
+            self.scrypt_params = scrypt_params
+
+        self.scrypt_id_pepper = scrypt_id_pepper
+        self.scrypt_gpg_pepper = scrypt_gpg_pepper
+
+        self.do_runtime_tests()
+
+        # --pinentry-mode, required for SecureDrop on gpg 2.1.x+, was
+        # added in gpg 2.1.
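+        # The version probe below checks the installed binary and only
+        # passes the option when it is understood.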
+        self.gpg_key_dir = gpg_key_dir
+        gpg_binary = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir)
+        if StrictVersion(gpg_binary.binary_version) >= StrictVersion('2.1'):
+            self.gpg = gnupg.GPG(binary='gpg2',
+                                 homedir=gpg_key_dir,
+                                 options=['--pinentry-mode loopback'])
+        else:
+            self.gpg = gpg_binary
+
+        # map code for a given language to a localized wordlist
+        self.__language2words = {}  # type: Dict[Text, List[str]]
+
+        with io.open(nouns_file) as f:
+            self.nouns = f.read().splitlines()
+
+        with io.open(adjectives_file) as f:
+            self.adjectives = f.read().splitlines()
+
+    # Make sure these pass before the app can run
+    def do_runtime_tests(self):
+        if self.scrypt_id_pepper == self.scrypt_gpg_pepper:
+            raise AssertionError('scrypt_id_pepper == scrypt_gpg_pepper')
+        # crash if we don't have a way to securely remove files
+        if not rm.check_secure_delete_capability():
+            raise AssertionError("Secure file deletion is not possible.")
+
+    def get_wordlist(self, locale):
+        # type: (Text) -> List[str]
+        """ Ensure the wordlist for the desired locale is read and available
+        in the instance's wordlist cache. If there is no wordlist for the
+        desired locale, fall back to the default English wordlist.
+
+        The localized wordlists are read from wordlists/{locale}.txt, but
+        for backward compatibility purposes the English wordlist is read
+        from the config.WORD_LIST file.
+        """
+
+        if locale not in self.__language2words:
+            if locale != 'en':
+                path = os.path.join(self.__securedrop_root,
+                                    'wordlists',
+                                    locale + '.txt')
+                if os.path.exists(path):
+                    wordlist_path = path
+                else:
+                    wordlist_path = self.__word_list
+            else:
+                wordlist_path = self.__word_list
+
+            with io.open(wordlist_path) as f:
+                content = f.read().splitlines()
+                self.__language2words[locale] = content
+
+        return self.__language2words[locale]
+
+    def genrandomid(self,
+                    words_in_random_id=None,
+                    locale='en'):
+        if words_in_random_id is None:
+            words_in_random_id = self.DEFAULT_WORDS_IN_RANDOM_ID
+        return ' '.join(random.choice(self.get_wordlist(locale))
+                        for x in range(words_in_random_id))
+
+    def display_id(self):
+        return ' '.join([random.choice(self.adjectives),
+                         random.choice(self.nouns)])
+
+    def hash_codename(self, codename, salt=None):
+        """Salts and hashes a codename using scrypt.
+
+        :param str codename: A source's codename.
+        :param str salt: The salt to mix with the codename when hashing.
+        :returns: A base32 encoded string; the salted codename hash.
+        """
+        if salt is None:
+            salt = self.scrypt_id_pepper
+        return b32encode(scrypt.hash(clean(codename),
+                                     salt,
+                                     **self.scrypt_params)).decode('utf-8')
+
+    def genkeypair(self, name, secret):
+        """Generate a GPG key through batch file key generation. A source's
+        codename is salted with SCRYPT_GPG_PEPPER and hashed with scrypt to
+        provide the passphrase used to encrypt their private key. Their name
+        should be their filesystem id.
+
+        >>> if not gpg.list_keys(hash_codename('randomid')):
+        ...     genkeypair(hash_codename('randomid'), 'randomid').type
+        ... else:
+        ...     u'P'
+        u'P'
+
+        :param str name: The source's filesystem id (their codename, salted
+                         with SCRYPT_ID_PEPPER, and hashed with scrypt).
+        :param str secret: The source's codename.
+        :returns: a :class:`GenKey <gnupg._parser.GenKey>` object, on which
+                  the ``__str__()`` method may be called to return the
+                  generated key's fingerprint.
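+
+        (The key's creation date is backdated to DEFAULT_KEY_CREATION_DATE
+        and expiration is disabled via DEFAULT_KEY_EXPIRATION_DATE; see the
+        class constants above.)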
+ + """ + name = clean(name) + secret = self.hash_codename(secret, salt=self.scrypt_gpg_pepper) + genkey_obj = self.gpg.gen_key(self.gpg.gen_key_input( + key_type=self.GPG_KEY_TYPE, + key_length=self.__gpg_key_length, + passphrase=secret, + name_email=name, + creation_date=self.DEFAULT_KEY_CREATION_DATE.isoformat(), + expire_date=self.DEFAULT_KEY_EXPIRATION_DATE + )) + return genkey_obj + + def delete_reply_keypair(self, source_filesystem_id): + key = self.getkey(source_filesystem_id) + # If this source was never flagged for review, they won't have a reply + # keypair + if not key: + return + + # Always delete keys without invoking pinentry-mode = loopback + # see: https://lists.gnupg.org/pipermail/gnupg-users/2016-May/055965.html + temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) + # The subkeys keyword argument deletes both secret and public keys. + temp_gpg.delete_keys(key, secret=True, subkeys=True) + self.keycache.delete(source_filesystem_id) + + def getkey(self, name): + fingerprint = self.keycache.get(name) + if fingerprint: # cache hit + return fingerprint + + # cache miss + for key in self.gpg.list_keys(): + for uid in key['uids']: + if name in uid: + self.keycache.put(name, key['fingerprint']) + return key['fingerprint'] + + return None + + def export_pubkey(self, name): + fingerprint = self.getkey(name) + if fingerprint: + return self.gpg.export_keys(fingerprint) + else: + return None + + def encrypt(self, plaintext, fingerprints, output=None): + # Verify the output path + if output: + current_app.storage.verify(output) + + if not isinstance(fingerprints, (list, tuple)): + fingerprints = [fingerprints, ] + # Remove any spaces from provided fingerprints GPG outputs fingerprints + # with spaces for readability, but requires the spaces to be removed + # when using fingerprints to specify recipients. + fingerprints = [fpr.replace(' ', '') for fpr in fingerprints] + + if not _is_stream(plaintext): + plaintext = _make_binary_stream(plaintext, "utf_8") + + out = self.gpg.encrypt(plaintext, + *fingerprints, + output=output, + always_trust=True, + armor=False) + if out.ok: + return out.data + else: + raise CryptoException(out.stderr) + + def decrypt(self, secret, ciphertext): + """ + >>> crypto = current_app.crypto_util + >>> key = crypto.genkeypair('randomid', 'randomid') + >>> message = u'Buenos días, mundo hermoso!' + >>> ciphertext = crypto.encrypt(message, str(key)) + >>> crypto.decrypt('randomid', ciphertext) == message.encode('utf-8') + True + """ + hashed_codename = self.hash_codename(secret, + salt=self.scrypt_gpg_pepper) + data = self.gpg.decrypt(ciphertext, passphrase=hashed_codename).data + + return data.decode('utf-8') + + def clean(s, also=''): """ >>> clean("[]") @@ -67,154 +327,9 @@ def clean(s, also=''): >>> clean("Helloworld") 'Helloworld' """ - # safe characters for every possible word in the wordlist includes capital - # letters because codename hashes are base32-encoded with capital letters - ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ' - 'KLMNOPQRSTUVWXYZ') for c in s: - if c not in ok and c not in also: + if c not in DICEWARE_SAFE_CHARS and c not in also: raise CryptoException("invalid input: {0}".format(s)) # scrypt.hash requires input of type str. Since the wordlist is all ASCII # characters, this conversion is not problematic return str(s) - - -def _get_wordlist(locale): - """" Ensure the wordlist for the desired locale is read and available - in the words global variable. 
If there is no wordlist for the - desired local, fallback to the default english wordlist. - - The localized wordlist are read from wordlists/{locale}.txt but - for backward compatibility purposes the english wordlist is read - from the config.WORD_LIST file. - - """ - - if locale not in language2words: - if locale != 'en': - path = os.path.join(config.SECUREDROP_ROOT, - 'wordlists', - locale + '.txt') - if os.path.exists(path): - wordlist_path = path - else: - wordlist_path = config.WORD_LIST - else: - wordlist_path = config.WORD_LIST - - language2words[locale] = open( - wordlist_path).read().rstrip('\n').split('\n') - - return language2words[locale] - - -def genrandomid(words_in_random_id=DEFAULT_WORDS_IN_RANDOM_ID, locale='en'): - return ' '.join(random.choice(_get_wordlist(locale)) - for x in range(words_in_random_id)) - - -def display_id(): - return ' '.join([random.choice(adjectives), random.choice(nouns)]) - - -def hash_codename(codename, salt=SCRYPT_ID_PEPPER): - """Salts and hashes a codename using scrypt. - - :param str codename: A source's codename. - :param str salt: The salt to mix with the codename when hashing. - :returns: A base32 encoded string; the salted codename hash. - """ - return b32encode(scrypt.hash(clean(codename), salt, **SCRYPT_PARAMS)) - - -def genkeypair(name, secret): - """Generate a GPG key through batch file key generation. A source's - codename is salted with SCRYPT_GPG_PEPPER and hashed with scrypt to - provide the passphrase used to encrypt their private key. Their name - should be their filesystem id. - - >>> if not gpg.list_keys(hash_codename('randomid')): - ... genkeypair(hash_codename('randomid'), 'randomid').type - ... else: - ... u'P' - u'P' - - :param str name: The source's filesystem id (their codename, salted - with SCRYPT_ID_PEPPER, and hashed with scrypt). - :param str secret: The source's codename. - :returns: a :class:`GenKey <gnupg._parser.GenKey>` object, on which - the ``__str__()`` method may be called to return the - generated key's fingeprint. - - """ - name = clean(name) - secret = hash_codename(secret, salt=SCRYPT_GPG_PEPPER) - return gpg.gen_key(gpg.gen_key_input( - key_type=GPG_KEY_TYPE, key_length=GPG_KEY_LENGTH, - passphrase=secret, - name_email=name - )) - - -def delete_reply_keypair(source_filesystem_id): - key = getkey(source_filesystem_id) - # If this source was never flagged for review, they won't have a reply - # keypair - if not key: - return - # The private key needs to be deleted before the public key can be deleted - # http://pythonhosted.org/python-gnupg/#deleting-keys - gpg.delete_keys(key, True) # private key - gpg.delete_keys(key) # public key - # TODO: srm? - - -def getkey(name): - for key in gpg.list_keys(): - for uid in key['uids']: - if name in uid: - return key['fingerprint'] - return None - - -def encrypt(plaintext, fingerprints, output=None): - # Verify the output path - if output: - store.verify(output) - - if not isinstance(fingerprints, (list, tuple)): - fingerprints = [fingerprints, ] - # Remove any spaces from provided fingerprints GPG outputs fingerprints - # with spaces for readability, but requires the spaces to be removed when - # using fingerprints to specify recipients. 
- fingerprints = [fpr.replace(' ', '') for fpr in fingerprints] - - if not _is_stream(plaintext): - plaintext = _make_binary_stream(plaintext, "utf_8") - - out = gpg.encrypt(plaintext, - *fingerprints, - output=output, - always_trust=True, - armor=False) - if out.ok: - return out.data - else: - raise CryptoException(out.stderr) - - -def decrypt(secret, ciphertext): - """ - >>> key = genkeypair('randomid', 'randomid') - >>> decrypt('randomid', - ... encrypt('Goodbye, cruel world!', str(key)) - ... ) - 'Goodbye, cruel world!' - """ - hashed_codename = hash_codename(secret, salt=SCRYPT_GPG_PEPPER) - return gpg.decrypt(ciphertext, passphrase=hashed_codename).data - - -if __name__ == "__main__": # pragma: no cover - import doctest - doctest.testmod() diff --git a/securedrop/db.py b/securedrop/db.py --- a/securedrop/db.py +++ b/securedrop/db.py @@ -1,472 +1,5 @@ -import os -import datetime -import base64 -import binascii +# -*- coding: utf-8 -*- -# Find the best implementation available on this platform -try: - from cStringIO import StringIO -except: - from StringIO import StringIO +from flask_sqlalchemy import SQLAlchemy -from sqlalchemy import create_engine, ForeignKey -from sqlalchemy.orm import scoped_session, sessionmaker, relationship, backref -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary -from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound -from jinja2 import Markup - -import scrypt -import pyotp - -import qrcode -# Using svg because it doesn't require additional dependencies -import qrcode.image.svg - -import config -import store - - -LOGIN_HARDENING = True -# Unfortunately, the login hardening measures mess with the tests in -# non-deterministic ways. TODO rewrite the tests so we can more -# precisely control which code paths are exercised. 
-if os.environ.get('SECUREDROP_ENV') == 'test': - LOGIN_HARDENING = False - -# http://flask.pocoo.org/docs/patterns/sqlalchemy/ - -if config.DATABASE_ENGINE == "sqlite": - engine = create_engine( - config.DATABASE_ENGINE + ":///" + - config.DATABASE_FILE - ) -else: # pragma: no cover - engine = create_engine( - config.DATABASE_ENGINE + '://' + - config.DATABASE_USERNAME + ':' + - config.DATABASE_PASSWORD + '@' + - config.DATABASE_HOST + '/' + - config.DATABASE_NAME, echo=False - ) - -db_session = scoped_session(sessionmaker(autocommit=False, - autoflush=False, - bind=engine)) -Base = declarative_base() -Base.query = db_session.query_property() - - -def get_one_or_else(query, logger, failure_method): - try: - return query.one() - except MultipleResultsFound as e: - logger.error( - "Found multiple while executing %s when one was expected: %s" % - (query, e, )) - failure_method(500) - except NoResultFound as e: - logger.error("Found none when one was expected: %s" % (e,)) - failure_method(404) - - -class Source(Base): - __tablename__ = 'sources' - id = Column(Integer, primary_key=True) - filesystem_id = Column(String(96), unique=True) - journalist_designation = Column(String(255), nullable=False) - flagged = Column(Boolean, default=False) - last_updated = Column(DateTime, default=datetime.datetime.utcnow) - star = relationship("SourceStar", uselist=False, backref="source") - - # sources are "pending" and don't get displayed to journalists until they - # submit something - pending = Column(Boolean, default=True) - - # keep track of how many interactions have happened, for filenames - interaction_count = Column(Integer, default=0, nullable=False) - - # Don't create or bother checking excessively long codenames to prevent DoS - NUM_WORDS = 7 - MAX_CODENAME_LEN = 128 - - def __init__(self, filesystem_id=None, journalist_designation=None): - self.filesystem_id = filesystem_id - self.journalist_designation = journalist_designation - - def __repr__(self): - return '<Source %r>' % (self.journalist_designation) - - @property - def journalist_filename(self): - valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_' - return ''.join([c for c in self.journalist_designation.lower().replace( - ' ', '_') if c in valid_chars]) - - def documents_messages_count(self): - try: - return self.docs_msgs_count - except AttributeError: - self.docs_msgs_count = {'messages': 0, 'documents': 0} - for submission in self.submissions: - if submission.filename.endswith('msg.gpg'): - self.docs_msgs_count['messages'] += 1 - elif (submission.filename.endswith('doc.gz.gpg') or - submission.filename.endswith('doc.zip.gpg')): - self.docs_msgs_count['documents'] += 1 - return self.docs_msgs_count - - @property - def collection(self): - """Return the list of submissions and replies for this source, sorted - in ascending order by the filename/interaction count.""" - collection = [] - collection.extend(self.submissions) - collection.extend(self.replies) - collection.sort(key=lambda x: int(x.filename.split('-')[0])) - return collection - - -class Submission(Base): - __tablename__ = 'submissions' - id = Column(Integer, primary_key=True) - source_id = Column(Integer, ForeignKey('sources.id')) - source = relationship( - "Source", - backref=backref("submissions", order_by=id, cascade="delete") - ) - - filename = Column(String(255), nullable=False) - size = Column(Integer, nullable=False) - downloaded = Column(Boolean, default=False) - - def __init__(self, source, filename): - self.source_id = source.id - self.filename = filename - 
self.size = os.stat(store.path(source.filesystem_id, filename)).st_size - - def __repr__(self): - return '<Submission %r>' % (self.filename) - - -class Reply(Base): - __tablename__ = "replies" - id = Column(Integer, primary_key=True) - - journalist_id = Column(Integer, ForeignKey('journalists.id')) - journalist = relationship( - "Journalist", - backref=backref( - 'replies', - order_by=id)) - - source_id = Column(Integer, ForeignKey('sources.id')) - source = relationship( - "Source", - backref=backref("replies", order_by=id, cascade="delete") - ) - - filename = Column(String(255), nullable=False) - size = Column(Integer, nullable=False) - - def __init__(self, journalist, source, filename): - self.journalist_id = journalist.id - self.source_id = source.id - self.filename = filename - self.size = os.stat(store.path(source.filesystem_id, filename)).st_size - - def __repr__(self): - return '<Reply %r>' % (self.filename) - - -class SourceStar(Base): - __tablename__ = 'source_stars' - id = Column("id", Integer, primary_key=True) - source_id = Column("source_id", Integer, ForeignKey('sources.id')) - starred = Column("starred", Boolean, default=True) - - def __eq__(self, other): - if isinstance(other, SourceStar): - return (self.source_id == other.source_id and - self.id == other.id and self.starred == other.starred) - return NotImplemented - - def __init__(self, source, starred=True): - self.source_id = source.id - self.starred = starred - - -class InvalidUsernameException(Exception): - - """Raised when a user logs in with an invalid username""" - - -class LoginThrottledException(Exception): - - """Raised when a user attempts to log in - too many times in a given time period""" - - -class WrongPasswordException(Exception): - - """Raised when a user logs in with an incorrect password""" - - -class BadTokenException(Exception): - - """Raised when a user logins in with an incorrect TOTP token""" - - -class PasswordError(Exception): - - """Generic error for passwords that are invalid. - """ - - -class InvalidPasswordLength(PasswordError): - """Raised when attempting to create a Journalist or log in with an invalid - password length. 
- """ - - def __init__(self, password): - self.pw_len = len(password) - - def __str__(self): - if self.pw_len > Journalist.MAX_PASSWORD_LEN: - return "Password too long (len={})".format(self.pw_len) - if self.pw_len < Journalist.MIN_PASSWORD_LEN: - return "Password needs to be at least {} characters".format( - Journalist.MIN_PASSWORD_LEN - ) - - -class NonDicewarePassword(PasswordError): - - """Raised when attempting to validate a password that is not diceware-like - """ - - -class Journalist(Base): - __tablename__ = "journalists" - id = Column(Integer, primary_key=True) - username = Column(String(255), nullable=False, unique=True) - pw_salt = Column(Binary(32)) - pw_hash = Column(Binary(256)) - is_admin = Column(Boolean) - - otp_secret = Column(String(16), default=pyotp.random_base32) - is_totp = Column(Boolean, default=True) - hotp_counter = Column(Integer, default=0) - last_token = Column(String(6)) - - created_on = Column(DateTime, default=datetime.datetime.utcnow) - last_access = Column(DateTime) - login_attempts = relationship( - "JournalistLoginAttempt", - backref="journalist") - - MIN_USERNAME_LEN = 3 - - def __init__(self, username, password, is_admin=False, otp_secret=None): - self.check_username_acceptable(username) - self.username = username - self.set_password(password) - self.is_admin = is_admin - - if otp_secret: - self.set_hotp_secret(otp_secret) - - def __repr__(self): - return "<Journalist {0}{1}>".format( - self.username, - " [admin]" if self.is_admin else "") - - def _gen_salt(self, salt_bytes=32): - return os.urandom(salt_bytes) - - _SCRYPT_PARAMS = dict(N=2**14, r=8, p=1) - - def _scrypt_hash(self, password, salt, params=None): - if not params: - params = self._SCRYPT_PARAMS - return scrypt.hash(str(password), salt, **params) - - MAX_PASSWORD_LEN = 128 - MIN_PASSWORD_LEN = 14 - - def set_password(self, password): - self.check_password_acceptable(password) - - # Don't do anything if user's password hasn't changed. - if self.pw_hash and self.valid_password(password): - return - - self.pw_salt = self._gen_salt() - self.pw_hash = self._scrypt_hash(password, self.pw_salt) - - @classmethod - def check_username_acceptable(cls, username): - if len(username) < cls.MIN_USERNAME_LEN: - raise InvalidUsernameException( - 'Username "{}" must be at least {} characters long.' - .format(username, cls.MIN_USERNAME_LEN)) - - @classmethod - def check_password_acceptable(cls, password): - # Enforce a reasonable maximum length for passwords to avoid DoS - if len(password) > cls.MAX_PASSWORD_LEN: - raise InvalidPasswordLength(password) - - # Enforce a reasonable minimum length for new passwords - if len(password) < cls.MIN_PASSWORD_LEN: - raise InvalidPasswordLength(password) - - # Ensure all passwords are "diceware-like" - if len(password.split()) < 7: - raise NonDicewarePassword() - - def valid_password(self, password): - # Avoid hashing passwords that are over the maximum length - if len(password) > self.MAX_PASSWORD_LEN: - raise InvalidPasswordLength(password) - # No check on minimum password length here because some passwords - # may have been set prior to setting the minimum password length. 
- return pyotp.utils.compare_digest( - self._scrypt_hash(password, self.pw_salt), - self.pw_hash) - - def regenerate_totp_shared_secret(self): - self.otp_secret = pyotp.random_base32() - - def set_hotp_secret(self, otp_secret): - self.is_totp = False - self.otp_secret = base64.b32encode( - binascii.unhexlify( - otp_secret.replace( - " ", - ""))) - self.hotp_counter = 0 - - @property - def totp(self): - return pyotp.TOTP(self.otp_secret) - - @property - def hotp(self): - return pyotp.HOTP(self.otp_secret) - - @property - def shared_secret_qrcode(self): - uri = self.totp.provisioning_uri( - self.username, - issuer_name="SecureDrop") - - qr = qrcode.QRCode( - box_size=15, - image_factory=qrcode.image.svg.SvgPathImage - ) - qr.add_data(uri) - img = qr.make_image() - - svg_out = StringIO() - img.save(svg_out) - return Markup(svg_out.getvalue()) - - @property - def formatted_otp_secret(self): - """The OTP secret is easier to read and manually enter if it is all - lowercase and split into four groups of four characters. The secret is - base32-encoded, so it is case insensitive.""" - sec = self.otp_secret - chunks = [sec[i:i + 4] for i in range(0, len(sec), 4)] - return ' '.join(chunks).lower() - - def _format_token(self, token): - """Strips from authentication tokens the whitespace - that many clients add for readability""" - return ''.join(token.split()) - - def verify_token(self, token): - token = self._format_token(token) - - # Store latest token to prevent OTP token reuse - self.last_token = token - db_session.commit() - - if self.is_totp: - # Also check the given token against the previous and next - # valid tokens, to compensate for potential time skew - # between the client and the server. The total valid - # window is 1:30s. - return self.totp.verify(token, valid_window=1) - else: - for counter_val in range( - self.hotp_counter, - self.hotp_counter + 20): - if self.hotp.verify(token, counter_val): - self.hotp_counter = counter_val + 1 - db_session.commit() - return True - return False - - _LOGIN_ATTEMPT_PERIOD = 60 # seconds - _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5 - - @classmethod - def throttle_login(cls, user): - # Record the login attempt... 
-        login_attempt = JournalistLoginAttempt(user)
-        db_session.add(login_attempt)
-        db_session.commit()
-
-        # ...and reject it if they have exceeded the threshold
-        login_attempt_period = datetime.datetime.utcnow() - \
-            datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD)
-        attempts_within_period = JournalistLoginAttempt.query.filter(
-            JournalistLoginAttempt.timestamp > login_attempt_period).all()
-        if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD:
-            raise LoginThrottledException(
-                "throttled ({} attempts in last {} seconds)".format(
-                    len(attempts_within_period),
-                    cls._LOGIN_ATTEMPT_PERIOD))
-
-    @classmethod
-    def login(cls, username, password, token):
-        try:
-            user = Journalist.query.filter_by(username=username).one()
-        except NoResultFound:
-            raise InvalidUsernameException(
-                "invalid username '{}'".format(username))
-
-        if LOGIN_HARDENING:
-            cls.throttle_login(user)
-
-        # Prevent TOTP token reuse
-        if user.last_token is not None:
-            if pyotp.utils.compare_digest(token, user.last_token):
-                raise BadTokenException("previously used token "
-                                        "{}".format(token))
-        if not user.verify_token(token):
-            raise BadTokenException("invalid token")
-        if not user.valid_password(password):
-            raise WrongPasswordException("invalid password")
-        return user
-
-
-class JournalistLoginAttempt(Base):
-
-    """This model keeps track of journalist's login attempts so we can
-    rate limit them in order to prevent attackers from brute forcing
-    passwords or two-factor tokens."""
-    __tablename__ = "journalist_login_attempt"
-    id = Column(Integer, primary_key=True)
-    timestamp = Column(DateTime, default=datetime.datetime.utcnow)
-    journalist_id = Column(Integer, ForeignKey('journalists.id'))
-
-    def __init__(self, journalist):
-        self.journalist_id = journalist.id
-
-
-# Declare (or import) models before init_db
-def init_db():
-    Base.metadata.create_all(bind=engine)
+db = SQLAlchemy()
diff --git a/securedrop/i18n.py b/securedrop/i18n.py
--- a/securedrop/i18n.py
+++ b/securedrop/i18n.py
@@ -42,9 +42,7 @@ def setup_app(config, app):
 
     translation_dirs = getattr(config, 'TRANSLATION_DIRS', None)
     if translation_dirs is None:
-        translation_dirs = \
-            path.join(path.dirname(path.realpath(__file__)),
-                      'translations')
+        translation_dirs = path.join(path.dirname(path.realpath(__file__)), 'translations')
 
     # `babel.translation_directories` is a nightmare
     # We need to set this manually via an absolute path
@@ -80,14 +78,14 @@ def get_locale(config):
     """
     locale = None
     accept_languages = []
-    for l in request.accept_languages.values():
+    for l in list(request.accept_languages.values()):
         if '-' in l:
             sep = '-'
         else:
             sep = '_'
         try:
             accept_languages.append(str(core.Locale.parse(l, sep)))
-        except:
+        except Exception:
            pass
    if 'l' in request.args:
        if len(request.args['l']) == 0:
diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py
new file mode 100755
--- /dev/null
+++ b/securedrop/i18n_tool.py
@@ -0,0 +1,465 @@
+#!/opt/venvs/securedrop-app-code/bin/python
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function
+import argparse
+import io
+import logging
+import os
+import glob
+import re
+import signal
+import subprocess
+import sys
+import textwrap
+import version
+
+from os.path import dirname, join, realpath
+
+from sh import git, pybabel, sed, msgmerge, xgettext, msgfmt
+
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
+log = logging.getLogger(__name__)
+
+
+class I18NTool(object):
+
+    #
+    # The database of supported languages, indexed by the language code
+    # used by weblate (i.e. whatever shows as CODE in
+    # https://weblate.securedrop.org/projects/securedrop/securedrop/CODE/
+    # is the index of the SUPPORTED_LANGUAGES database below).
+    #
+    # name: English name of the language for the documentation, not for
+    #     display in the interface.
+    # desktop: The language code used for desktop icons.
+    #
+    SUPPORTED_LANGUAGES = {
+        'ar': {'name': 'Arabic', 'desktop': 'ar', },
+        'ca': {'name': 'Catalan', 'desktop': 'ca', },
+        'cs': {'name': 'Czech', 'desktop': 'cs', },
+        'de_DE': {'name': 'German', 'desktop': 'de_DE', },
+        'el': {'name': 'Greek', 'desktop': 'el', },
+        'es_ES': {'name': 'Spanish', 'desktop': 'es_ES', },
+        'fr_FR': {'name': 'French', 'desktop': 'fr', },
+        'hi': {'name': 'Hindi', 'desktop': 'hi', },
+        'is': {'name': 'Icelandic', 'desktop': 'is', },
+        'it_IT': {'name': 'Italian', 'desktop': 'it', },
+        'nb_NO': {'name': 'Norwegian', 'desktop': 'nb_NO', },
+        'nl': {'name': 'Dutch', 'desktop': 'nl', },
+        'pt_BR': {'name': 'Portuguese, Brasil', 'desktop': 'pt_BR', },
+        'ro': {'name': 'Romanian', 'desktop': 'ro', },
+        'ru': {'name': 'Russian', 'desktop': 'ru', },
+        'sk': {'name': 'Slovak', 'desktop': 'sk', },
+        'sv': {'name': 'Swedish', 'desktop': 'sv', },
+        'tr': {'name': 'Turkish', 'desktop': 'tr', },
+        'zh_Hant': {'name': 'Chinese, Traditional', 'desktop': 'zh_Hant', },
+    }
+
+    def file_is_modified(self, path):
+        dir = dirname(path)
+        return subprocess.call(['git', '-C', dir, 'diff', '--quiet', path])
+
+    def ensure_i18n_remote(self, args):
+        k = {'_cwd': args.root}
+        if b'i18n' not in git.remote(**k).stdout:
+            git.remote.add('i18n', args.url, **k)
+        git.fetch('i18n', **k)
+
+    def translate_messages(self, args):
+        messages_file = os.path.join(args.translations_dir, 'messages.pot')
+
+        if args.extract_update:
+            if not os.path.exists(args.translations_dir):
+                os.makedirs(args.translations_dir)
+            sources = args.sources.split(',')
+            pybabel.extract(
+                '--charset=utf-8',
+                '--mapping', args.mapping,
+                '--output', messages_file,
+                '--project=SecureDrop',
+                '--version', args.version,
+                "[email protected]",
+                "--copyright-holder=Freedom of the Press Foundation",
+                *sources)
+            sed('-i', '-e', '/^"POT-Creation-Date/d', messages_file)
+
+            if (self.file_is_modified(messages_file) and
+                    len(os.listdir(args.translations_dir)) > 1):
+                tglob = '{}/*/LC_MESSAGES/*.po'.format(args.translations_dir)
+                for translation in glob.iglob(tglob):
+                    msgmerge('--previous', '--update', translation,
+                             messages_file)
+                log.warning("messages translations updated in {}".format(
+                    messages_file))
+            else:
+                log.warning("messages translations are already up to date")
+
+        if args.compile and len(os.listdir(args.translations_dir)) > 1:
+            pybabel.compile('--directory', args.translations_dir)
+
+    def translate_desktop(self, args):
+        messages_file = os.path.join(args.translations_dir, 'desktop.pot')
+
+        if args.extract_update:
+            sources = args.sources.split(',')
+            k = {'_cwd': args.translations_dir}
+            xgettext(
+                "--output=desktop.pot",
+                "--language=Desktop",
+                "--keyword",
+                "--keyword=Name",
+                "--package-version", args.version,
+                "[email protected]",
+                "--copyright-holder=Freedom of the Press Foundation",
+                *sources,
+                **k)
+            sed('-i', '-e', '/^"POT-Creation-Date/d', messages_file, **k)
+
+            if self.file_is_modified(messages_file):
+                for f in os.listdir(args.translations_dir):
+                    if not f.endswith('.po'):
+                        continue
+                    po_file = os.path.join(args.translations_dir, f)
+                    msgmerge('--update', po_file, messages_file)
+                log.warning("messages translations updated in " +
+                            messages_file)
+            else:
+                
log.warning("desktop translations are already up to date") + + if args.compile: + pos = [f for f in os.listdir(args.translations_dir) if f.endswith('.po')] + linguas = [l[:-3] for l in pos] + content = "\n".join(linguas) + "\n" + open(join(args.translations_dir, 'LINGUAS'), 'w').write(content) + + for source in args.sources.split(','): + target = source.rstrip('.in') + msgfmt('--desktop', + '--template', source, + '-o', target, + '-d', '.', + _cwd=args.translations_dir) + + def set_translate_parser(self, + subps, + parser, + translations_dir, + sources): + parser.add_argument( + '--extract-update', + action='store_true', + help=('extract strings to translate and ' + 'update existing translations')) + parser.add_argument( + '--compile', + action='store_true', + help='compile translations') + parser.add_argument( + '--translations-dir', + default=translations_dir, + help='Base directory for translation files (default {})'.format( + translations_dir)) + parser.add_argument( + '--version', + default=version.__version__, + help=('SecureDrop version ' + 'to store in pot files (default {})'.format( + version.__version__))) + parser.add_argument( + '--sources', + default=sources, + help='Source files and directories to extract (default {})'.format( + sources)) + + def set_translate_messages_parser(self, subps): + parser = subps.add_parser('translate-messages', + help=('Update and compile ' + 'source and template translations')) + translations_dir = join(dirname(realpath(__file__)), 'translations') + sources = '.,source_templates,journalist_templates' + self.set_translate_parser(subps, parser, translations_dir, sources) + mapping = 'babel.cfg' + parser.add_argument( + '--mapping', + default=mapping, + help='Mapping of files to consider (default {})'.format( + mapping)) + parser.set_defaults(func=self.translate_messages) + + def set_translate_desktop_parser(self, subps): + parser = subps.add_parser('translate-desktop', + help=('Update and compile ' + 'desktop icons translations')) + translations_dir = join( + dirname(realpath(__file__)), + '../install_files/ansible-base/roles/tails-config/templates') + sources = 'desktop-journalist-icon.j2.in,desktop-source-icon.j2.in' + self.set_translate_parser(subps, parser, translations_dir, sources) + parser.set_defaults(func=self.translate_desktop) + + @staticmethod + def require_git_email_name(git_dir): + cmd = ('git -C {d} config --get user.name > /dev/null && ' + 'git -C {d} config --get user.email > /dev/null'.format( + d=git_dir)) + if subprocess.call(cmd, shell=True): # nosec + if u'docker' in io.open('/proc/1/cgroup').read(): + log.error("remember ~/.gitconfig does not exist " + "in the dev-shell Docker container, " + "only .git/config does") + raise Exception(cmd + ' returned false, please set name and email') + return True + + def update_docs(self, args): + l10n_content = u'.. 
GENERATED BY i18n_tool.py DO NOT EDIT:\n\n' + for (code, info) in sorted(I18NTool.SUPPORTED_LANGUAGES.items()): + l10n_content += '* ' + info['name'] + ' (``' + code + '``)\n' + includes = join(args.documentation_dir, 'includes') + l10n_txt = join(includes, 'l10n.txt') + io.open(l10n_txt, mode='w').write(l10n_content) + self.require_git_email_name(includes) + if self.file_is_modified(l10n_txt): + k = {'_cwd': includes} + git.add('l10n.txt', **k) + msg = 'docs: update the list of supported languages' + git.commit('-m', msg, 'l10n.txt', **k) + log.warning(l10n_txt + " updated") + git_show_out = git.show(**k) + log.warning(git_show_out) + else: + log.warning(l10n_txt + " already up to date") + + def set_update_docs_parser(self, subps): + parser = subps.add_parser('update-docs', + help=('Update the documentation')) + documentation_dir = join(dirname(realpath(__file__)), '..', 'docs') + parser.add_argument( + '--documentation-dir', + default=documentation_dir, + help=('root directory of the SecureDrop documentation' + ' (default {})'.format(documentation_dir))) + parser.set_defaults(func=self.update_docs) + + def update_from_weblate(self, args): + self.ensure_i18n_remote(args) + codes = list(I18NTool.SUPPORTED_LANGUAGES.keys()) + if args.supported_languages: + codes = args.supported_languages.split(',') + for code in sorted(codes): + info = I18NTool.SUPPORTED_LANGUAGES[code] + + def need_update(p): + exists = os.path.exists(join(args.root, p)) + k = {'_cwd': args.root} + git.checkout('i18n/i18n', '--', p, **k) + git.reset('HEAD', '--', p, **k) + if not exists: + return True + else: + return self.file_is_modified(join(args.root, p)) + + def add(p): + git('-C', args.root, 'add', p) + + updated = False + # + # Update messages + # + p = "securedrop/translations/{l}/LC_MESSAGES/messages.po".format( + l=code) # noqa: E741 + if need_update(p): + add(p) + updated = True + # + # Update desktop + # + desktop_code = info['desktop'] + p = join("install_files/ansible-base/roles", + "tails-config/templates/{l}.po".format( + l=desktop_code)) # noqa: E741 + if need_update(p): + add(p) + updated = True + + if updated: + self.upstream_commit(args, code) + + def translators(self, args, path, commit_range): + """ + Return the set of people who've modified a file in Weblate. + + Extracts all the authors of translation changes to the given + path in the given commit range. Translation changes are + identified by the presence of "Translated using Weblate" in + the commit message. 
+ """ + translation_re = re.compile('Translated using Weblate') + + path_changes = git( + '--no-pager', '-C', args.root, + 'log', '--format=%aN\x1e%s', commit_range, '--', path, + _encoding='utf-8' + ) + path_changes = u"{}".format(path_changes) + path_changes = [c.split('\x1e') for c in path_changes.strip().split('\n')] + path_changes = [c for c in path_changes if len(c) > 1 and translation_re.match(c[1])] + + path_authors = [c[0] for c in path_changes] + return set(path_authors) + + def upstream_commit(self, args, code): + self.require_git_email_name(args.root) + authors = set() + diffs = u"{}".format(git('--no-pager', '-C', args.root, 'diff', '--name-only', '--cached')) + + for path in sorted(diffs.strip().split('\n')): + previous_message = u"{}".format(git( + '--no-pager', '-C', args.root, 'log', '-n', '1', path, + _encoding='utf-8')) + update_re = re.compile(r'(?:updated from| revision:) (\w+)') + m = update_re.search(previous_message) + if m: + origin = m.group(1) + else: + origin = '' + authors |= self.translators(args, path, '{}..i18n/i18n'.format(origin)) + + authors = u"\n ".join(sorted(authors)) + + current = git('-C', args.root, 'rev-parse', 'i18n/i18n') + info = I18NTool.SUPPORTED_LANGUAGES[code] + message = textwrap.dedent(u""" + l10n: updated {name} ({code}) + + contributors: + {authors} + + updated from: + repo: {remote} + revision: {current} + """).format( + remote=args.url, + name=info['name'], + authors=authors, + code=code, + current=current + ) + git('-C', args.root, 'commit', '-m', message) + + def set_update_from_weblate_parser(self, subps): + parser = subps.add_parser('update-from-weblate', + help=('Import translations from weblate')) + root = join(dirname(realpath(__file__)), '..') + parser.add_argument( + '--root', + default=root, + help=('root of the SecureDrop git repository' + ' (default {})'.format(root))) + url = 'https://github.com/freedomofpress/securedrop-i18n' + parser.add_argument( + '--url', + default=url, + help=('URL of the weblate repository' + ' (default {})'.format(url))) + parser.add_argument( + '--supported-languages', + help='comma separated list of supported languages') + parser.set_defaults(func=self.update_from_weblate) + + def set_list_locales_parser(self, subps): + parser = subps.add_parser('list-locales', help='List supported locales') + parser.set_defaults(func=self.list_locales) + + def list_locales(self, args): + print(sorted(list(self.SUPPORTED_LANGUAGES.keys()) + ['en_US'])) + + def set_list_translators_parser(self, subps): + parser = subps.add_parser('list-translators', + help=('List contributing translators')) + root = join(dirname(realpath(__file__)), '..') + parser.add_argument( + '--root', + default=root, + help=('root of the SecureDrop git repository' + ' (default {})'.format(root))) + url = 'https://github.com/freedomofpress/securedrop-i18n' + parser.add_argument( + '--url', + default=url, + help=('URL of the weblate repository' + ' (default {})'.format(url))) + parser.add_argument( + '--all', + action="store_true", + help=( + "List everyone who's ever contributed, instead of just since the last " + "sync from Weblate." 
+ ) + ) + parser.set_defaults(func=self.list_translators) + + def get_last_sync(self): + commits = git('--no-pager', 'log', '--format=%h:%s', 'i18n/i18n', _encoding='utf-8') + for commit in commits: + commit_hash, msg = commit.split(':', 1) + if msg.startswith("l10n: sync "): + return commit_hash + return "" + + def list_translators(self, args): + self.ensure_i18n_remote(args) + app_template = "securedrop/translations/{}/LC_MESSAGES/messages.po" + desktop_template = "install_files/ansible-base/roles/tails-config/templates/{}.po" + last_sync = self.get_last_sync() + for code, info in sorted(I18NTool.SUPPORTED_LANGUAGES.items()): + translators = set([]) + paths = [ + app_template.format(code), + desktop_template.format(info["desktop"]), + ] + for path in paths: + try: + commit_range = "i18n/i18n" + if last_sync and not args.all: + commit_range = '{}..{}'.format(last_sync, commit_range) + t = self.translators(args, path, commit_range) + translators.update(t) + except Exception as e: + print("Could not check git history of {}: {}".format(path, e), file=sys.stderr) + print(u"{} ({}):\n {}".format(code, info["name"], "\n ".join(sorted(translators)))) + + def get_args(self): + parser = argparse.ArgumentParser( + prog=__file__, + description='i18n tool for SecureDrop.') + parser.add_argument('-v', '--verbose', action='store_true') + subps = parser.add_subparsers() + + self.set_translate_messages_parser(subps) + self.set_translate_desktop_parser(subps) + self.set_update_docs_parser(subps) + self.set_update_from_weblate_parser(subps) + self.set_list_translators_parser(subps) + self.set_list_locales_parser(subps) + + return parser + + def setup_verbosity(self, args): + if args.verbose: + logging.getLogger('sh.command').setLevel(logging.INFO) + log.setLevel(logging.DEBUG) + else: + log.setLevel(logging.INFO) + + def main(self, argv): + try: + args = self.get_args().parse_args(argv) + self.setup_verbosity(args) + return args.func(args) + except KeyboardInterrupt: + return signal.SIGINT + + +if __name__ == '__main__': # pragma: no cover + sys.exit(I18NTool().main(sys.argv[1:])) diff --git a/securedrop/journalist.py b/securedrop/journalist.py --- a/securedrop/journalist.py +++ b/securedrop/journalist.py @@ -1,12 +1,27 @@ # -*- coding: utf-8 -*- -import config +from sdconfig import config from journalist_app import create_app +from models import Source +from source_app.utils import asynchronous app = create_app(config) +@asynchronous +def prime_keycache(): + """ + Preloads CryptoUtil.keycache. 
+    """
+    with app.app_context():
+        for source in Source.query.filter_by(pending=False).all():
+            app.crypto_util.getkey(source.filesystem_id)
+
+
+prime_keycache()
+
+
 if __name__ == "__main__":  # pragma: no cover
     debug = getattr(config, 'env', 'prod') != 'prod'
-    app.run(debug=debug, host='0.0.0.0', port=8081)
+    app.run(debug=debug, host='0.0.0.0', port=8081)  # nosec
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -1,48 +1,116 @@
 # -*- coding: utf-8 -*-
 
 from datetime import datetime, timedelta
-from flask import Flask, session, redirect, url_for, flash, g, request
+from flask import (Flask, session, redirect, url_for, flash, g, request,
+                   render_template)
 from flask_assets import Environment
 from flask_babel import gettext
 from flask_wtf.csrf import CSRFProtect, CSRFError
 from os import path
+import sys
+from werkzeug.exceptions import default_exceptions
 
 import i18n
 import template_filters
 import version
 
-from db import db_session, Journalist
-from journalist_app import account, admin, main, col
-from journalist_app.utils import get_source, logged_in
-
-_insecure_views = ['main.login', 'static']
+from crypto_util import CryptoUtil
+from db import db
+from journalist_app import account, admin, api, main, col
+from journalist_app.utils import (get_source, logged_in,
+                                  JournalistInterfaceSessionInterface,
+                                  cleanup_expired_revoked_tokens)
+from models import InstanceConfig, Journalist
+from store import Storage
+
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+    # flake8 cannot understand type annotations yet.
+    # That is why all type annotation related import
+    # statements have to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 + from sdconfig import SDConfig # noqa: F401 + from typing import Optional, Union, Tuple, Any # noqa: F401 + from werkzeug import Response # noqa: F401 + from werkzeug.exceptions import HTTPException # noqa: F401 + +_insecure_views = ['main.login', 'main.select_logo', 'static'] def create_app(config): + # type: (SDConfig) -> Flask app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR, static_folder=path.join(config.SECUREDROP_ROOT, 'static')) - app.config.from_object(config.JournalistInterfaceFlaskConfig) + app.config.from_object(config.JournalistInterfaceFlaskConfig) # type: ignore + app.sdconfig = config + app.session_interface = JournalistInterfaceSessionInterface() - CSRFProtect(app) + csrf = CSRFProtect(app) Environment(app) + if config.DATABASE_ENGINE == "sqlite": + db_uri = (config.DATABASE_ENGINE + ":///" + + config.DATABASE_FILE) + else: + db_uri = ( + config.DATABASE_ENGINE + '://' + + config.DATABASE_USERNAME + ':' + + config.DATABASE_PASSWORD + '@' + + config.DATABASE_HOST + '/' + + config.DATABASE_NAME + ) + app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + app.config['SQLALCHEMY_DATABASE_URI'] = db_uri + db.init_app(app) + + app.storage = Storage(config.STORE_DIR, + config.TEMP_DIR, + config.JOURNALIST_KEY) + + app.crypto_util = CryptoUtil( + scrypt_params=config.SCRYPT_PARAMS, + scrypt_id_pepper=config.SCRYPT_ID_PEPPER, + scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER, + securedrop_root=config.SECUREDROP_ROOT, + word_list=config.WORD_LIST, + nouns_file=config.NOUNS, + adjectives_file=config.ADJECTIVES, + gpg_key_dir=config.GPG_KEY_DIR, + ) + @app.errorhandler(CSRFError) def handle_csrf_error(e): + # type: (CSRFError) -> Response # render the message first to ensure it's localized. 
msg = gettext('You have been logged out due to inactivity') session.clear() flash(msg, 'error') return redirect(url_for('main.login')) + def _handle_http_exception(error): + # type: (HTTPException) -> Tuple[Union[Response, str], Optional[int]] + # Workaround for no blueprint-level 404/5 error handlers, see: + # https://github.com/pallets/flask/issues/503#issuecomment-71383286 + handler = list(app.error_handler_spec['api'][error.code].values())[0] + if request.path.startswith('/api/') and handler: + return handler(error) + + return render_template('error.html', error=error), error.code + + for code in default_exceptions: + app.errorhandler(code)(_handle_http_exception) + i18n.setup_app(config, app) app.jinja_env.trim_blocks = True app.jinja_env.lstrip_blocks = True app.jinja_env.globals['version'] = version.__version__ if hasattr(config, 'CUSTOM_HEADER_IMAGE'): - app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE + app.jinja_env.globals['header_image'] = \ + config.CUSTOM_HEADER_IMAGE # type: ignore app.jinja_env.globals['use_custom_header_image'] = True else: app.jinja_env.globals['header_image'] = 'logo.png' @@ -52,36 +120,48 @@ def handle_csrf_error(e): template_filters.rel_datetime_format app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat - @app.template_filter('autoversion') - def autoversion_filter(filename): - """Use this template filter for cache busting""" - absolute_filename = path.join(config.SECUREDROP_ROOT, filename[1:]) - if path.exists(absolute_filename): - timestamp = str(path.getmtime(absolute_filename)) - else: - return filename - versioned_filename = "{0}?v={1}".format(filename, timestamp) - return versioned_filename - - @app.teardown_appcontext - def shutdown_session(exception=None): - """Automatically remove database sessions at the end of the request, or - when the application shuts down""" - db_session.remove() + @app.before_first_request + def expire_blacklisted_tokens(): + return cleanup_expired_revoked_tokens() + + @app.before_request + def load_instance_config(): + app.instance_config = InstanceConfig.get_current() @app.before_request def setup_g(): + # type: () -> Optional[Response] """Store commonly used values in Flask's special g object""" if 'expires' in session and datetime.utcnow() >= session['expires']: session.clear() flash(gettext('You have been logged out due to inactivity'), 'error') + uid = session.get('uid', None) + if uid: + user = Journalist.query.get(uid) + if user and 'nonce' in session and \ + session['nonce'] != user.session_nonce: + session.clear() + flash(gettext('You have been logged out due to password change'), + 'error') + session['expires'] = datetime.utcnow() + \ timedelta(minutes=getattr(config, 'SESSION_EXPIRATION_MINUTES', 120)) + # Work around https://github.com/lepture/flask-wtf/issues/275 + # -- after upgrading from Python 2 to Python 3, any existing + # session's csrf_token value will be retrieved as bytes, + # causing a TypeError. This simple fix, deleting the existing + # token, was suggested in the issue comments. This code will + # be safe to remove after Python 2 reaches EOL in 2020, and no + # supported SecureDrop installations can still have this + # problem. 
+ if sys.version_info.major > 2 and type(session.get('csrf_token')) is bytes: + del session['csrf_token'] + uid = session.get('uid', None) if uid: g.user = Journalist.query.get(uid) @@ -91,8 +171,11 @@ def setup_g(): g.html_lang = i18n.locale_to_rfc_5646(g.locale) g.locales = i18n.get_locale2name() - if request.endpoint not in _insecure_views and not logged_in(): - return redirect(url_for('main.login')) + if request.path.split('/')[1] == 'api': + pass # We use the @token_required decorator for the API endpoints + else: # We are not using the API + if request.endpoint not in _insecure_views and not logged_in(): + return redirect(url_for('main.login')) if request.method == 'POST': filesystem_id = request.form.get('filesystem_id') @@ -100,10 +183,15 @@ def setup_g(): g.filesystem_id = filesystem_id g.source = get_source(filesystem_id) + return None + app.register_blueprint(main.make_blueprint(config)) app.register_blueprint(account.make_blueprint(config), url_prefix='/account') app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin') app.register_blueprint(col.make_blueprint(config), url_prefix='/col') + api_blueprint = api.make_blueprint(config) + app.register_blueprint(api_blueprint, url_prefix='/api/v1') + csrf.exempt(api_blueprint) return app diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py --- a/securedrop/journalist_app/account.py +++ b/securedrop/journalist_app/account.py @@ -4,9 +4,9 @@ flash, session) from flask_babel import gettext -from db import db_session -from journalist_app.utils import (make_password, set_diceware_password, - validate_user) +from db import db +from journalist_app.utils import (make_password, set_diceware_password, set_name, validate_user, + validate_hotp_secret) def make_blueprint(config): @@ -18,6 +18,13 @@ def edit(): return render_template('edit_account.html', password=password) + @view.route('/change-name', methods=('POST',)) + def change_name(): + first_name = request.form.get('first_name') + last_name = request.form.get('last_name') + set_name(g.user, first_name, last_name) + return redirect(url_for('account.edit')) + @view.route('/new-password', methods=('POST',)) def new_password(): user = g.user @@ -39,12 +46,12 @@ def new_two_factor(): if request.method == 'POST': token = request.form['token'] if g.user.verify_token(token): - flash(gettext("Token in two-factor authentication verified."), + flash(gettext("Your two-factor credentials have been reset successfully."), "notification") return redirect(url_for('account.edit')) else: flash(gettext( - "Could not verify token in two-factor authentication."), + "There was a problem verifying the two-factor code. 
Please try again."), "error") return render_template('account_new_two_factor.html', user=g.user) @@ -53,15 +60,17 @@ def new_two_factor(): def reset_two_factor_totp(): g.user.is_totp = True g.user.regenerate_totp_shared_secret() - db_session.commit() + db.session.commit() return redirect(url_for('account.new_two_factor')) @view.route('/reset-2fa-hotp', methods=['POST']) def reset_two_factor_hotp(): otp_secret = request.form.get('otp_secret', None) if otp_secret: + if not validate_hotp_secret(g.user, otp_secret): + return render_template('account_edit_hotp_secret.html') g.user.set_hotp_secret(otp_secret) - db_session.commit() + db.session.commit() return redirect(url_for('account.new_two_factor')) else: return render_template('account_edit_hotp_secret.html') diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -8,12 +8,13 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound -from db import (db_session, Journalist, InvalidUsernameException, - PasswordError) +from db import db +from models import (InstanceConfig, Journalist, InvalidUsernameException, + FirstOrLastNameError, PasswordError) from journalist_app.decorators import admin_required -from journalist_app.utils import (make_password, commit_account_changes, - set_diceware_password) -from journalist_app.forms import LogoForm, NewUserForm +from journalist_app.utils import (make_password, commit_account_changes, set_diceware_password, + validate_hotp_secret, revoke_token) +from journalist_app.forms import LogoForm, NewUserForm, SubmissionPreferencesForm def make_blueprint(config): @@ -28,16 +29,39 @@ def index(): @view.route('/config', methods=('GET', 'POST')) @admin_required def manage_config(): - form = LogoForm() - if form.validate_on_submit(): - f = form.logo.data - static_filepath = os.path.join(config.SECUREDROP_ROOT, - "static/i/logo.png") - f.save(static_filepath) - flash(gettext("Image updated."), "notification") - return redirect(url_for("admin.manage_config")) + # The UI prompt ("prevent") is the opposite of the setting ("allow"): + submission_preferences_form = SubmissionPreferencesForm( + prevent_document_uploads=not current_app.instance_config.allow_document_uploads) + logo_form = LogoForm() + if logo_form.validate_on_submit(): + f = logo_form.logo.data + custom_logo_filepath = os.path.join(current_app.static_folder, 'i', + 'custom_logo.png') + try: + f.save(custom_logo_filepath) + flash(gettext("Image updated."), "logo-success") + except Exception: + flash("Unable to process the image file." 
+ " Try another one.", "logo-error") + finally: + return redirect(url_for("admin.manage_config")) else: - return render_template("config.html", form=form) + for field, errors in list(logo_form.errors.items()): + for error in errors: + flash(error, "logo-error") + return render_template("config.html", + submission_preferences_form=submission_preferences_form, + logo_form=logo_form) + + @view.route('/update-submission-preferences', methods=['POST']) + @admin_required + def update_submission_preferences(): + form = SubmissionPreferencesForm() + if form.validate_on_submit(): + # The UI prompt ("prevent") is the opposite of the setting ("allow"): + value = not bool(request.form.get('prevent_document_uploads')) + InstanceConfig.set('allow_document_uploads', value) + return redirect(url_for('admin.manage_config')) @view.route('/add', methods=('GET', 'POST')) @admin_required @@ -46,6 +70,8 @@ def add_user(): if form.validate_on_submit(): form_valid = True username = request.form['username'] + first_name = request.form['first_name'] + last_name = request.form['last_name'] password = request.form['password'] is_admin = bool(request.form.get('is_admin')) @@ -55,10 +81,12 @@ def add_user(): otp_secret = request.form.get('otp_secret', '') new_user = Journalist(username=username, password=password, + first_name=first_name, + last_name=last_name, is_admin=is_admin, otp_secret=otp_secret) - db_session.add(new_user) - db_session.commit() + db.session.add(new_user) + db.session.commit() except PasswordError: flash(gettext( 'There was an error with the autogenerated password. ' @@ -68,15 +96,15 @@ def add_user(): form_valid = False flash('Invalid username: ' + str(e), "error") except IntegrityError as e: - db_session.rollback() + db.session.rollback() form_valid = False if "UNIQUE constraint failed: journalists.username" in str(e): - flash(gettext("That username is already in use"), - "error") + flash(gettext('Username "{user}" already taken.'.format( + user=username)), "error") else: flash(gettext("An error occurred saving this user" " to the database." - " Please inform your administrator."), + " Please inform your admin."), "error") current_app.logger.error("Adding user " "'{}' failed: {}".format( @@ -99,14 +127,13 @@ def new_user_two_factor(): token = request.form['token'] if user.verify_token(token): flash(gettext( - "Token in two-factor authentication " - "accepted for user {user}.").format( - user=user.username), + "The two-factor code for user \"{user}\" was verified " + "successfully.").format(user=user.username), "notification") return redirect(url_for("admin.index")) else: flash(gettext( - "Could not verify token in two-factor authentication."), + "There was a problem verifying the two-factor code. 
Please try again."), "error") return render_template("admin_new_user_two_factor.html", user=user) @@ -118,7 +145,7 @@ def reset_two_factor_totp(): user = Journalist.query.get(uid) user.is_totp = True user.regenerate_totp_shared_secret() - db_session.commit() + db.session.commit() return redirect(url_for('admin.new_user_two_factor', uid=uid)) @view.route('/reset-2fa-hotp', methods=['POST']) @@ -128,30 +155,10 @@ def reset_two_factor_hotp(): otp_secret = request.form.get('otp_secret', None) if otp_secret: user = Journalist.query.get(uid) - try: - user.set_hotp_secret(otp_secret) - except TypeError as e: - if "Non-hexadecimal digit found" in str(e): - flash(gettext( - "Invalid secret format: " - "please only submit letters A-F and numbers 0-9."), - "error") - elif "Odd-length string" in str(e): - flash(gettext( - "Invalid secret format: " - "odd-length secret. Did you mistype the secret?"), - "error") - else: - flash(gettext( - "An unexpected error occurred! " - "Please inform your administrator."), "error") - current_app.logger.error( - "set_hotp_secret '{}' (id {}) failed: {}".format( - otp_secret, uid, e)) + if not validate_hotp_secret(user, otp_secret): return render_template('admin_edit_hotp_secret.html', uid=uid) - else: - db_session.commit() - return redirect(url_for('admin.new_user_two_factor', uid=uid)) + db.session.commit() + return redirect(url_for('admin.new_user_two_factor', uid=uid)) else: return render_template('admin_edit_hotp_secret.html', uid=uid) @@ -184,6 +191,22 @@ def edit_user(user_id): else: user.username = new_username + try: + first_name = request.form['first_name'] + Journalist.check_name_acceptable(first_name) + user.first_name = first_name + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") + return redirect(url_for("admin.edit_user", user_id=user_id)) + + try: + last_name = request.form['last_name'] + Journalist.check_name_acceptable(last_name) + user.last_name = last_name + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") + return redirect(url_for("admin.edit_user", user_id=user_id)) + user.is_admin = bool(request.form.get('is_admin')) commit_account_changes(user) @@ -201,16 +224,27 @@ def set_password(user_id): abort(404) password = request.form.get('password') - set_diceware_password(user, password) + if set_diceware_password(user, password) is not False: + if user.last_token is not None: + revoke_token(user, user.last_token) + user.session_nonce += 1 + db.session.commit() + return redirect(url_for('admin.edit_user', user_id=user_id)) @view.route('/delete/<int:user_id>', methods=('POST',)) @admin_required def delete_user(user_id): user = Journalist.query.get(user_id) - if user: - db_session.delete(user) - db_session.commit() + if user_id == g.user.id: + # Do not flash because the interface already has safe guards. 
+ # It can only happen by manually crafting a POST request + current_app.logger.error( + "Admin {} tried to delete itself".format(g.user.username)) + abort(403) + elif user: + db.session.delete(user) + db.session.commit() flash(gettext("Deleted user '{user}'").format( user=user.username), "notification") else: @@ -230,14 +264,19 @@ def new_password(user_id): abort(404) password = request.form.get('password') - set_diceware_password(user, password) + if set_diceware_password(user, password) is not False: + if user.last_token is not None: + revoke_token(user, user.last_token) + user.session_nonce += 1 + db.session.commit() return redirect(url_for('admin.edit_user', user_id=user_id)) @view.route('/ossec-test') @admin_required def ossec_test(): current_app.logger.error('This is a test OSSEC alert') - flash(gettext('Test alert sent. Check your email.'), 'notification') + flash(gettext('Test alert sent. Please check your email.'), + 'notification') return redirect(url_for('admin.manage_config')) return view diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py new file mode 100644 --- /dev/null +++ b/securedrop/journalist_app/api.py @@ -0,0 +1,337 @@ +import json + +from datetime import datetime, timedelta +from flask import abort, Blueprint, current_app, jsonify, request +from functools import wraps +from sqlalchemy.exc import IntegrityError +from os import path +from uuid import UUID +from werkzeug.exceptions import default_exceptions # type: ignore + +from db import db +from journalist_app import utils +from models import (Journalist, Reply, Source, Submission, + LoginThrottledException, InvalidUsernameException, + BadTokenException, WrongPasswordException) +from store import NotEncrypted + + +TOKEN_EXPIRATION_MINS = 60 * 8 + + +def get_user_object(request): + """Helper function to use in token_required views that need a user + object + """ + auth_token = request.headers.get('Authorization').split(" ")[1] + user = Journalist.validate_api_token_and_get_user(auth_token) + return user + + +def token_required(f): + @wraps(f) + def decorated_function(*args, **kwargs): + try: + auth_header = request.headers['Authorization'] + except KeyError: + return abort(403, 'API token not found in Authorization header.') + + if auth_header: + split = auth_header.split(" ") + if len(split) != 2 or split[0] != 'Token': + abort(403, 'Malformed authorization header.') + auth_token = split[1] + else: + auth_token = '' + if not Journalist.validate_api_token_and_get_user(auth_token): + return abort(403, 'API token is invalid or expired.') + return f(*args, **kwargs) + return decorated_function + + +def get_or_404(model, object_id, column=''): + if column: + result = model.query.filter(column == object_id).one_or_none() + else: + result = model.query.get(object_id) + if result is None: + abort(404) + return result + + +def make_blueprint(config): + api = Blueprint('api', __name__) + + @api.route('/') + def get_endpoints(): + endpoints = {'sources_url': '/api/v1/sources', + 'current_user_url': '/api/v1/user', + 'submissions_url': '/api/v1/submissions', + 'replies_url': '/api/v1/replies', + 'auth_token_url': '/api/v1/token'} + return jsonify(endpoints), 200 + + # Before every post, we validate the payload before processing the request + @api.before_request + def validate_data(): + if request.method == 'POST': + # flag, star, and logout can have empty payloads + if not request.data: + dataless_endpoints = [ + 'add_star', + 'remove_star', + 'flag', + 'logout', + ] + for endpoint in 
dataless_endpoints: + if request.endpoint == 'api.' + endpoint: + return + return abort(400, 'malformed request') + # other requests must have valid JSON payload + else: + try: + json.loads(request.data.decode('utf-8')) + except (ValueError): + return abort(400, 'malformed request') + + @api.route('/token', methods=['POST']) + def get_token(): + creds = json.loads(request.data.decode('utf-8')) + + username = creds.get('username', None) + passphrase = creds.get('passphrase', None) + one_time_code = creds.get('one_time_code', None) + + if username is None: + return abort(400, 'username field is missing') + if passphrase is None: + return abort(400, 'passphrase field is missing') + if one_time_code is None: + return abort(400, 'one_time_code field is missing') + + try: + journalist = Journalist.login(username, passphrase, one_time_code) + token_expiry = datetime.utcnow() + timedelta( + seconds=TOKEN_EXPIRATION_MINS * 60) + + response = jsonify({ + 'token': journalist.generate_api_token(expiration=TOKEN_EXPIRATION_MINS * 60), + 'expiration': token_expiry.isoformat() + 'Z', + 'journalist_uuid': journalist.uuid, + 'journalist_first_name': journalist.first_name, + 'journalist_last_name': journalist.last_name, + }) + + # Update access metadata + journalist.last_access = datetime.utcnow() + db.session.add(journalist) + db.session.commit() + + return response, 200 + except (LoginThrottledException, InvalidUsernameException, + BadTokenException, WrongPasswordException): + return abort(403, 'Token authentication failed.') + + @api.route('/sources', methods=['GET']) + @token_required + def get_all_sources(): + sources = Source.query.filter_by(pending=False).all() + return jsonify( + {'sources': [source.to_json() for source in sources]}), 200 + + @api.route('/sources/<source_uuid>', methods=['GET', 'DELETE']) + @token_required + def single_source(source_uuid): + if request.method == 'GET': + source = get_or_404(Source, source_uuid, column=Source.uuid) + return jsonify(source.to_json()), 200 + elif request.method == 'DELETE': + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.delete_collection(source.filesystem_id) + return jsonify({'message': 'Source and submissions deleted'}), 200 + + @api.route('/sources/<source_uuid>/add_star', methods=['POST']) + @token_required + def add_star(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.make_star_true(source.filesystem_id) + db.session.commit() + return jsonify({'message': 'Star added'}), 201 + + @api.route('/sources/<source_uuid>/remove_star', methods=['DELETE']) + @token_required + def remove_star(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + utils.make_star_false(source.filesystem_id) + db.session.commit() + return jsonify({'message': 'Star removed'}), 200 + + @api.route('/sources/<source_uuid>/flag', methods=['POST']) + @token_required + def flag(source_uuid): + source = get_or_404(Source, source_uuid, + column=Source.uuid) + source.flagged = True + db.session.commit() + return jsonify({'message': 'Source flagged for reply'}), 200 + + @api.route('/sources/<source_uuid>/submissions', methods=['GET']) + @token_required + def all_source_submissions(source_uuid): + source = get_or_404(Source, source_uuid, column=Source.uuid) + return jsonify( + {'submissions': [submission.to_json() for + submission in source.submissions]}), 200 + + @api.route('/sources/<source_uuid>/submissions/<submission_uuid>/download', # noqa + methods=['GET']) + @token_required + def 
download_submission(source_uuid, submission_uuid):
+        get_or_404(Source, source_uuid, column=Source.uuid)
+        submission = get_or_404(Submission, submission_uuid,
+                                column=Submission.uuid)
+
+        # Mark as downloaded
+        submission.downloaded = True
+        db.session.commit()
+
+        return utils.serve_file_with_etag(submission)
+
+    @api.route('/sources/<source_uuid>/replies/<reply_uuid>/download',
+               methods=['GET'])
+    @token_required
+    def download_reply(source_uuid, reply_uuid):
+        get_or_404(Source, source_uuid, column=Source.uuid)
+        reply = get_or_404(Reply, reply_uuid, column=Reply.uuid)
+
+        return utils.serve_file_with_etag(reply)
+
+    @api.route('/sources/<source_uuid>/submissions/<submission_uuid>',
+               methods=['GET', 'DELETE'])
+    @token_required
+    def single_submission(source_uuid, submission_uuid):
+        if request.method == 'GET':
+            get_or_404(Source, source_uuid, column=Source.uuid)
+            submission = get_or_404(Submission, submission_uuid, column=Submission.uuid)
+            return jsonify(submission.to_json()), 200
+        elif request.method == 'DELETE':
+            get_or_404(Source, source_uuid, column=Source.uuid)
+            submission = get_or_404(Submission, submission_uuid, column=Submission.uuid)
+            utils.delete_file_object(submission)
+            return jsonify({'message': 'Submission deleted'}), 200
+
+    @api.route('/sources/<source_uuid>/replies', methods=['GET', 'POST'])
+    @token_required
+    def all_source_replies(source_uuid):
+        if request.method == 'GET':
+            source = get_or_404(Source, source_uuid, column=Source.uuid)
+            return jsonify(
+                {'replies': [reply.to_json() for
+                             reply in source.replies]}), 200
+        elif request.method == 'POST':
+            source = get_or_404(Source, source_uuid,
+                                column=Source.uuid)
+            if request.json is None:
+                abort(400, 'please send requests in valid JSON')
+
+            if 'reply' not in request.json:
+                abort(400, 'reply not found in request body')
+
+            user = get_user_object(request)
+
+            data = request.json
+            if not data['reply']:
+                abort(400, 'reply should not be empty')
+
+            source.interaction_count += 1
+            try:
+                filename = current_app.storage.save_pre_encrypted_reply(
+                    source.filesystem_id,
+                    source.interaction_count,
+                    source.journalist_filename,
+                    data['reply'])
+            except NotEncrypted:
+                return jsonify(
+                    {'message': 'You must encrypt replies client side'}), 400
+
+            # issue #3918
+            filename = path.basename(filename)
+
+            reply = Reply(user, source, filename)
+
+            reply_uuid = data.get('uuid', None)
+            if reply_uuid is not None:
+                # check that it is parseable
+                try:
+                    UUID(reply_uuid)
+                except ValueError:
+                    abort(400, "'uuid' was not a valid UUID")
+                reply.uuid = reply_uuid
+
+            try:
+                db.session.add(reply)
+                db.session.add(source)
+                db.session.commit()
+            except IntegrityError as e:
+                db.session.rollback()
+                if 'UNIQUE constraint failed: replies.uuid' in str(e):
+                    abort(409, 'That UUID is already in use.')
+                else:
+                    raise e
+
+            return jsonify({'message': 'Your reply has been stored',
+                            'uuid': reply.uuid,
+                            'filename': reply.filename}), 201
+
+    @api.route('/sources/<source_uuid>/replies/<reply_uuid>',
+               methods=['GET', 'DELETE'])
+    @token_required
+    def single_reply(source_uuid, reply_uuid):
+        get_or_404(Source, source_uuid, column=Source.uuid)
+        reply = get_or_404(Reply, reply_uuid, column=Reply.uuid)
+        if request.method == 'GET':
+            return jsonify(reply.to_json()), 200
+        elif request.method == 'DELETE':
+            utils.delete_file_object(reply)
+            return jsonify({'message': 'Reply deleted'}), 200
+
+    @api.route('/submissions', methods=['GET'])
+    @token_required
+    def get_all_submissions():
+        submissions = Submission.query.all()
+        return 
jsonify({'submissions': [submission.to_json() for + submission in submissions]}), 200 + + @api.route('/replies', methods=['GET']) + @token_required + def get_all_replies(): + replies = Reply.query.all() + return jsonify( + {'replies': [reply.to_json() for reply in replies]}), 200 + + @api.route('/user', methods=['GET']) + @token_required + def get_current_user(): + user = get_user_object(request) + return jsonify(user.to_json()), 200 + + @api.route('/logout', methods=['POST']) + @token_required + def logout(): + user = get_user_object(request) + auth_token = request.headers.get('Authorization').split(" ")[1] + utils.revoke_token(user, auth_token) + return jsonify({'message': 'Your token has been revoked.'}), 200 + + def _handle_api_http_exception(error): + # Workaround for no blueprint-level 404/5 error handlers, see: + # https://github.com/pallets/flask/issues/503#issuecomment-71383286 + response = jsonify({'error': error.name, + 'message': error.description}) + + return response, error.code + + for code in default_exceptions: + api.errorhandler(code)(_handle_api_http_exception) + + return api diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -5,10 +5,8 @@ from flask_babel import gettext from sqlalchemy.orm.exc import NoResultFound -import crypto_util -import store - -from db import db_session, Submission +from db import db +from models import Submission from journalist_app.forms import ReplyForm from journalist_app.utils import (make_star_true, make_star_false, get_source, delete_collection, col_download_unread, @@ -22,20 +20,20 @@ def make_blueprint(config): @view.route('/add_star/<filesystem_id>', methods=('POST',)) def add_star(filesystem_id): make_star_true(filesystem_id) - db_session.commit() + db.session.commit() return redirect(url_for('main.index')) @view.route("/remove_star/<filesystem_id>", methods=('POST',)) def remove_star(filesystem_id): make_star_false(filesystem_id) - db_session.commit() + db.session.commit() return redirect(url_for('main.index')) @view.route('/<filesystem_id>') def col(filesystem_id): form = ReplyForm() source = get_source(filesystem_id) - source.has_key = crypto_util.getkey(filesystem_id) + source.has_key = current_app.crypto_util.getkey(filesystem_id) return render_template("col.html", filesystem_id=filesystem_id, source=source, form=form) @@ -69,20 +67,23 @@ def process(): return method(cols_selected) @view.route('/<filesystem_id>/<fn>') - def download_single_submission(filesystem_id, fn): - """Sends a client the contents of a single submission.""" + def download_single_file(filesystem_id, fn): + """Sends a client the contents of a single file, either a submission + or a journalist reply""" if '..' 
in fn or fn.startswith('/'): abort(404) - try: - Submission.query.filter( - Submission.filename == fn).one().downloaded = True - db_session.commit() - except NoResultFound as e: - current_app.logger.error( - "Could not mark " + fn + " as downloaded: %s" % (e,)) - - return send_file(store.path(filesystem_id, fn), + # only mark as read when it's a submission (and not a journalist reply) + if not fn.endswith('reply.gpg'): + try: + Submission.query.filter( + Submission.filename == fn).one().downloaded = True + db.session.commit() + except NoResultFound as e: + current_app.logger.error( + "Could not mark " + fn + " as downloaded: %s" % (e,)) + + return send_file(current_app.storage.path(filesystem_id, fn), mimetype="application/pgp-encrypted") return view diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py --- a/securedrop/journalist_app/decorators.py +++ b/securedrop/journalist_app/decorators.py @@ -12,8 +12,7 @@ def admin_required(func): def wrapper(*args, **kwargs): if logged_in() and g.user.is_admin: return func(*args, **kwargs) - # TODO: sometimes this gets flashed 2x (Chrome only?) - flash(gettext("Only administrators can access this page."), + flash(gettext("Only admins can access this page."), "notification") return redirect(url_for('main.index')) return wrapper diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py --- a/securedrop/journalist_app/forms.py +++ b/securedrop/journalist_app/forms.py @@ -7,15 +7,15 @@ ValidationError) from wtforms.validators import InputRequired, Optional -from db import Journalist +from models import Journalist def otp_secret_validation(form, field): strip_whitespace = field.data.replace(' ', '') if len(strip_whitespace) != 40: raise ValidationError(gettext( - 'Field must be 40 characters long but ' - 'got {num_chars}.'.format( + 'HOTP secrets are 40 characters long - ' + 'you have entered {num_chars}.'.format( num_chars=len(strip_whitespace) ))) @@ -30,11 +30,20 @@ def minimum_length_validation(form, field): num_chars=len(field.data)))) +def name_length_validation(form, field): + if len(field.data) > Journalist.MAX_NAME_LEN: + raise ValidationError(gettext( + 'Field can not be more than {max_chars} characters.' 
+ .format(max_chars=Journalist.MAX_NAME_LEN))) + + class NewUserForm(FlaskForm): username = TextField('username', validators=[ InputRequired(message=gettext('This field is required.')), minimum_length_validation ]) + first_name = TextField('first_name', validators=[name_length_validation, Optional()]) + last_name = TextField('last_name', validators=[name_length_validation, Optional()]) password = HiddenField('password') is_admin = BooleanField('is_admin') is_hotp = BooleanField('is_hotp') @@ -46,7 +55,7 @@ class NewUserForm(FlaskForm): class ReplyForm(FlaskForm): message = TextAreaField( - u'Message', + 'Message', id="content-area", validators=[ InputRequired(message=gettext( @@ -55,9 +64,13 @@ class ReplyForm(FlaskForm): ) +class SubmissionPreferencesForm(FlaskForm): + prevent_document_uploads = BooleanField('prevent_document_uploads') + + class LogoForm(FlaskForm): logo = FileField(validators=[ FileRequired(message=gettext('File required.')), - FileAllowed(['jpg', 'png', 'jpeg'], - message=gettext('Upload images only.')) + FileAllowed(['png'], + message=gettext("You can only upload PNG image files.")) ]) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import os from datetime import datetime from flask import (Blueprint, request, current_app, session, url_for, redirect, @@ -6,10 +7,10 @@ from flask_babel import gettext from sqlalchemy.sql.expression import false -import crypto_util import store -from db import db_session, Source, SourceStar, Submission, Reply +from db import db +from models import Source, SourceStar, Submission, Reply from journalist_app.forms import ReplyForm from journalist_app.utils import (validate_user, bulk_delete, download, confirm_bulk_delete, get_source) @@ -25,16 +26,17 @@ def login(): request.form['password'], request.form['token']) if user: - current_app.logger.info("'{}' logged in with the token {}" + current_app.logger.info("'{}' logged in with the two-factor code {}" .format(request.form['username'], request.form['token'])) # Update access metadata user.last_access = datetime.utcnow() - db_session.add(user) - db_session.commit() + db.session.add(user) + db.session.commit() session['uid'] = user.id + session['nonce'] = user.session_nonce return redirect(url_for('main.index')) return render_template("login.html") @@ -43,8 +45,17 @@ def login(): def logout(): session.pop('uid', None) session.pop('expires', None) + session.pop('nonce', None) return redirect(url_for('main.index')) + @view.route('/org-logo') + def select_logo(): + if os.path.exists(os.path.join(current_app.static_folder, 'i', + 'custom_logo.png')): + return redirect(url_for('static', filename='i/custom_logo.png')) + else: + return redirect(url_for('static', filename='i/logo.png')) + @view.route('/') def index(): unstarred = [] @@ -54,6 +65,7 @@ def index(): # the Pocoo style guide, IMHO: # http://www.pocoo.org/internal/styleguide/ sources = Source.query.filter_by(pending=False) \ + .filter(Source.last_updated.isnot(None)) \ .order_by(Source.last_updated.desc()) \ .all() for source in sources: @@ -94,19 +106,22 @@ def reply(): g.source.interaction_count += 1 filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count, g.source.journalist_filename) - crypto_util.encrypt(form.message.data, - [crypto_util.getkey(g.filesystem_id), - config.JOURNALIST_KEY], - output=store.path(g.filesystem_id, filename)) + current_app.crypto_util.encrypt( + 
form.message.data,
+            [current_app.crypto_util.getkey(g.filesystem_id),
+             config.JOURNALIST_KEY],
+            output=current_app.storage.path(g.filesystem_id, filename),
+        )
 
         reply = Reply(g.user, g.source, filename)
 
         try:
-            db_session.add(reply)
-            db_session.commit()
+            db.session.add(reply)
+            db.session.commit()
+            store.async_add_checksum_for_file(reply)
         except Exception as exc:
             flash(gettext(
                 "An unexpected error occurred! Please "
-                "inform your administrator."), "error")
+                "inform your admin."), "error")
             # We take a cautious approach to logging here because we're dealing
             # with responses to sources. It's possible the exception message
             # could contain information we don't want to write to disk.
@@ -123,7 +138,7 @@ def reply():
     @view.route('/flag', methods=('POST',))
     def flag():
         g.source.flagged = True
-        db_session.commit()
+        db.session.commit()
         return render_template('flag.html', filesystem_id=g.filesystem_id,
                                codename=g.source.journalist_designation)
 
@@ -153,25 +168,6 @@ def bulk():
         else:
             abort(400)
 
-    @view.route('/regenerate-code', methods=('POST',))
-    def regenerate_code():
-        original_journalist_designation = g.source.journalist_designation
-        g.source.journalist_designation = crypto_util.display_id()
-
-        for item in g.source.collection:
-            item.filename = store.rename_submission(
-                g.filesystem_id,
-                item.filename,
-                g.source.journalist_filename)
-        db_session.commit()
-
-        flash(gettext(
-            "The source '{original_name}' has been renamed to '{new_name}'")
-            .format(original_name=original_journalist_designation,
-                    new_name=g.source.journalist_designation),
-            "notification")
-        return redirect(url_for('col.col', filesystem_id=g.filesystem_id))
-
     @view.route('/download_unread/<filesystem_id>')
     def download_unread_filesystem_id(filesystem_id):
         id = Source.query.filter(Source.filesystem_id == filesystem_id) \
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -1,24 +1,32 @@
 # -*- coding: utf-8 -*-
+import binascii
 from datetime import datetime
 
 from flask import (g, flash, current_app, abort, send_file, redirect, url_for,
-                   render_template, Markup)
+                   render_template, Markup, sessions, request)
 from flask_babel import gettext, ngettext
 from sqlalchemy.sql.expression import false
 
-import crypto_util
 import i18n
-import store
-import worker
-from db import (db_session, get_one_or_else, Source, Journalist,
-                InvalidUsernameException, WrongPasswordException,
-                LoginThrottledException, BadTokenException, SourceStar,
-                PasswordError, Submission)
-from rm import srm
+from db import db
+from models import (get_one_or_else, Source, Journalist, InvalidUsernameException,
+                    WrongPasswordException, FirstOrLastNameError, LoginThrottledException,
+                    BadTokenException, SourceStar, PasswordError, Submission, RevokedToken)
+from store import add_checksum_for_file
+
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+    # flake8 cannot understand type annotations yet.
+    # That is why all type-annotation-related import
+    # statements have to be marked as noqa.
+    # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+    from sdconfig import SDConfig  # noqa: F401
 
 
 def logged_in():
+    # type: () -> bool
    # When a user is logged in, we push their user ID (database primary key)
    # into the session. setup_g checks for this value, and if it finds it,
    # stores a reference to the user's Journalist object in g.
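
The reply() view above encrypts each journalist reply to both the source's
public key and the shared journalist key before writing it into the store. A
minimal sketch of that multi-recipient pattern with the python-gnupg library
follows; the keyring path, fingerprints, and the encrypt_reply helper are
illustrative assumptions, not SecureDrop's actual wiring:

    import gnupg

    # Hypothetical GPG home; SecureDrop manages its own keyring location.
    gpg = gnupg.GPG(gnupghome='/tmp/example-keyring')

    def encrypt_reply(plaintext, source_fpr, journalist_fpr, output_path):
        # Encrypting to both fingerprints produces one ciphertext that the
        # source or any holder of the shared journalist key can decrypt.
        result = gpg.encrypt(
            plaintext,
            [source_fpr, journalist_fpr],
            output=output_path,
            always_trust=True,  # assume key trust is managed out of band
        )
        if not result.ok:
            raise RuntimeError('encryption failed: {}'.format(result.status))
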
@@ -30,17 +38,17 @@ def logged_in(): def commit_account_changes(user): - if db_session.is_modified(user): + if db.session.is_modified(user): try: - db_session.add(user) - db_session.commit() + db.session.add(user) + db.session.commit() except Exception as e: flash(gettext( "An unexpected error occurred! Please " - "inform your administrator."), "error") + "inform your admin."), "error") current_app.logger.error("Account changes for '{}' failed: {}" .format(user, e)) - db_session.rollback() + db.session.rollback() else: flash(gettext("Account updated."), "success") @@ -93,15 +101,48 @@ def validate_user(username, password, token, error_message=None): if user.is_totp: login_flashed_msg += " " login_flashed_msg += gettext( - "Please wait for a new two-factor token" - " before trying again.") - except: + "Please wait for a new code from your two-factor mobile" + " app or security key before trying again.") + except Exception: pass flash(login_flashed_msg, "error") return None +def validate_hotp_secret(user, otp_secret): + """ + Validates and sets the HOTP provided by a user + :param user: the change is for this instance of the User object + :param otp_secret: the new HOTP secret + :return: True if it validates, False if it does not + """ + try: + user.set_hotp_secret(otp_secret) + except (binascii.Error, TypeError) as e: + if "Non-hexadecimal digit found" in str(e): + flash(gettext( + "Invalid secret format: " + "please only submit letters A-F and numbers 0-9."), + "error") + return False + elif "Odd-length string" in str(e): + flash(gettext( + "Invalid secret format: " + "odd-length secret. Did you mistype the secret?"), + "error") + return False + else: + flash(gettext( + "An unexpected error occurred! " + "Please inform your admin."), "error") + current_app.logger.error( + "set_hotp_secret '{}' (id {}) failed: {}".format( + otp_secret, user.id, e)) + return False + return True + + def download(zip_basename, submissions): """Send client contents of ZIP-file *zip_basename*-<timestamp>.zip containing *submissions*. The ZIP-file, being a @@ -110,30 +151,34 @@ def download(zip_basename, submissions): :param str zip_basename: The basename of the ZIP-file download. - :param list submissions: A list of :class:`db.Submission`s to + :param list submissions: A list of :class:`models.Submission`s to include in the ZIP-file. 
""" - zf = store.get_bulk_archive(submissions, - zip_directory=zip_basename) + zf = current_app.storage.get_bulk_archive(submissions, + zip_directory=zip_basename) attachment_filename = "{}--{}.zip".format( zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) # Mark the submissions that have been downloaded as such for submission in submissions: submission.downloaded = True - db_session.commit() + db.session.commit() return send_file(zf.name, mimetype="application/zip", attachment_filename=attachment_filename, as_attachment=True) +def delete_file_object(file_object): + path = current_app.storage.path(file_object.source.filesystem_id, file_object.filename) + current_app.storage.move_to_shredder(path) + db.session.delete(file_object) + db.session.commit() + + def bulk_delete(filesystem_id, items_selected): for item in items_selected: - item_path = store.path(filesystem_id, item.filename) - worker.enqueue(srm, item_path) - db_session.delete(item) - db_session.commit() + delete_file_object(item) flash(ngettext("Submission deleted.", "{num} submissions deleted.".format( @@ -155,15 +200,15 @@ def make_star_true(filesystem_id): source.star.starred = True else: source_star = SourceStar(source) - db_session.add(source_star) + db.session.add(source_star) def make_star_false(filesystem_id): source = get_source(filesystem_id) if not source.star: source_star = SourceStar(source) - db_session.add(source_star) - db_session.commit() + db.session.add(source_star) + db.session.commit() source.star.starred = False @@ -171,7 +216,7 @@ def col_star(cols_selected): for filesystem_id in cols_selected: make_star_true(filesystem_id) - db_session.commit() + db.session.commit() return redirect(url_for('main.index')) @@ -179,7 +224,7 @@ def col_un_star(cols_selected): for filesystem_id in cols_selected: make_star_false(filesystem_id) - db_session.commit() + db.session.commit() return redirect(url_for('main.index')) @@ -199,8 +244,11 @@ def col_delete(cols_selected): def make_password(config): + # type: (SDConfig) -> str while True: - password = crypto_util.genrandomid(7, i18n.get_language(config)) + password = current_app.crypto_util.genrandomid( + 7, + i18n.get_language(config)) try: Journalist.check_password_acceptable(password) return password @@ -210,16 +258,25 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions - job = worker.enqueue(srm, store.path(filesystem_id)) + path = current_app.storage.path(filesystem_id) + current_app.storage.move_to_shredder(path) # Delete the source's reply keypair - crypto_util.delete_reply_keypair(filesystem_id) + current_app.crypto_util.delete_reply_keypair(filesystem_id) # Delete their entry in the db source = get_source(filesystem_id) - db_session.delete(source) - db_session.commit() - return job + db.session.delete(source) + db.session.commit() + + +def set_name(user, first_name, last_name): + try: + user.set_name(first_name, last_name) + db.session.commit() + flash(gettext('Name updated.'), "success") + except FirstOrLastNameError as e: + flash(gettext('Name not updated: {}'.format(e)), "error") def set_diceware_password(user, password): @@ -228,10 +285,10 @@ def set_diceware_password(user, password): except PasswordError: flash(gettext( 'You submitted a bad password! 
Password not changed.'), 'error')
-        return
+        return False
 
     try:
-        db_session.commit()
+        db.session.commit()
     except Exception:
         flash(gettext(
             'There was an error, and the new password might not have been '
@@ -239,7 +296,7 @@
             'out of your account, you should reset your password again.'),
             'error')
         current_app.logger.error('Failed to update a valid password.')
-        return
+        return False
 
     # using Markup so the HTML isn't escaped
     flash(Markup("<p>" + gettext(
@@ -247,6 +304,7 @@
         "save it in your KeePassX database. New password:") +
         ' <span><code>{}</code></span></p>'.format(password)),
         'success')
+    return True
 
 
 def col_download_unread(cols_selected):
@@ -274,3 +332,52 @@ def col_download_all(cols_selected):
         submissions += Submission.query.filter(
             Submission.source_id == id).all()
     return download("all", submissions)
+
+
+def serve_file_with_etag(db_obj):
+    file_path = current_app.storage.path(db_obj.source.filesystem_id, db_obj.filename)
+    response = send_file(file_path,
+                         mimetype="application/pgp-encrypted",
+                         as_attachment=True,
+                         add_etags=False)  # Disable Flask default ETag
+
+    if not db_obj.checksum:
+        add_checksum_for_file(db.session, db_obj, file_path)
+
+    response.direct_passthrough = False
+    response.headers['Etag'] = db_obj.checksum
+    return response
+
+
+class JournalistInterfaceSessionInterface(
+        sessions.SecureCookieSessionInterface):
+    """A custom session interface that skips storing sessions for api requests but
+    otherwise just uses the default behaviour."""
+    def save_session(self, app, session, response):
+        # If this is an api request do not save the session
+        if request.path.split("/")[1] == "api":
+            return
+        else:
+            super(JournalistInterfaceSessionInterface, self).save_session(
+                app, session, response)
+
+
+def cleanup_expired_revoked_tokens():
+    """Remove tokens that have now expired from the revoked token table."""
+
+    revoked_tokens = db.session.query(RevokedToken).all()
+
+    for revoked_token in revoked_tokens:
+        if Journalist.validate_token_is_not_expired_or_invalid(revoked_token.token):
+            pass  # The token has not expired; we must keep it in the revoked token table.
+        else:
+            # The token is no longer valid; remove it from the revoked token table.
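+            # (Deletion is queued per token inside this loop; the single
+            # commit below flushes all removals in one transaction.)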
+ db.session.delete(revoked_token) + + db.session.commit() + + +def revoke_token(user, auth_token): + revoked_token = RevokedToken(token=auth_token, journalist_id=user.id) + db.session.add(revoked_token) + db.session.commit() diff --git a/securedrop/manage.py b/securedrop/manage.py --- a/securedrop/manage.py +++ b/securedrop/manage.py @@ -1,76 +1,55 @@ -#!/usr/bin/env python +#!/opt/venvs/securedrop-app-code/bin/python # -*- coding: utf-8 -*- import argparse -import codecs import logging import os -from os.path import dirname, join, realpath +import pwd +import subprocess import shutil import signal -import subprocess import sys import time import traceback -import version + +sys.path.insert(0, "/var/www/securedrop") # noqa: E402 import qrcode +from flask import current_app from sqlalchemy.orm.exc import NoResultFound os.environ['SECUREDROP_ENV'] = 'dev' # noqa -import config -import crypto_util -from db import (db_session, init_db, Journalist, PasswordError, - InvalidUsernameException) + +from db import db +from models import ( + FirstOrLastNameError, + InvalidUsernameException, + Journalist, + PasswordError, +) +from management import app_context, config from management.run import run +from management.submissions import ( + add_check_db_disconnect_parser, + add_check_fs_disconnect_parser, + add_delete_db_disconnect_parser, + add_delete_fs_disconnect_parser, + add_list_db_disconnect_parser, + add_list_fs_disconnect_parser, + add_were_there_submissions_today, +) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s') log = logging.getLogger(__name__) -def sh(command, input=None): - """Run the *command* which must be a shell snippet. The stdin is - either /dev/null or the *input* argument string. +def obtain_input(text): + """Wrapper for testability as suggested in + https://github.com/pytest-dev/pytest/issues/1598#issuecomment-224761877""" + return input(text) - The stderr/stdout of the snippet are captured and logged via - logging.debug(), one line at a time. - """ - log.debug(":sh: " + command) - if input is None: - stdin = None - else: - stdin = subprocess.PIPE - proc = subprocess.Popen( - args=command, - stdin=stdin, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True, - bufsize=1) - if stdin is not None: - proc.stdin.write(input) - proc.stdin.close() - lines_of_command_output = [] - loggable_line_list = [] - with proc.stdout: - for line in iter(proc.stdout.readline, b''): - line = line.decode('utf-8') - lines_of_command_output.append(line) - loggable_line = line.strip().encode('ascii', 'ignore') - log.debug(loggable_line) - loggable_line_list.append(loggable_line) - if proc.wait() != 0: - if log.getEffectiveLevel() > logging.DEBUG: - for loggable_line in loggable_line_list: - log.error(loggable_line) - raise subprocess.CalledProcessError( - returncode=proc.returncode, - cmd=command - ) - return "".join(lines_of_command_output) - - -def reset(args): + +def reset(args, context=None): """Clears the SecureDrop development applications' state, restoring them to the way they were immediately after running `setup_dev.sh`. This command: 1. Erases the development sqlite database file. 
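
The obtain_input() wrapper above replaces the old raw_input() calls purely so
tests can substitute canned answers for interactive prompts. A hedged sketch
of a pytest test using that seam; the test name and username are hypothetical
and assume the manage module imports cleanly in the test environment:

    import manage

    def test_get_username_accepts_valid_input(monkeypatch):
        # Swap the interactive prompt for a canned answer so the
        # username-collection loop runs without a TTY.
        monkeypatch.setattr(manage, 'obtain_input', lambda prompt: 'journalist1')
        assert manage._get_username() == 'journalist1'
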
@@ -79,27 +58,59 @@ def reset(args): """ # Erase the development db file if not hasattr(config, 'DATABASE_FILE'): - raise Exception("TODO: ./manage.py doesn't know how to clear the db " + raise Exception("./manage.py doesn't know how to clear the db " 'if the backend is not sqlite') + + # we need to save some data about the old DB file so we can recreate it + # with the same state + try: + stat_res = os.stat(config.DATABASE_FILE) + uid = stat_res.st_uid + gid = stat_res.st_gid + except OSError: + uid = os.getuid() + gid = os.getgid() + try: os.remove(config.DATABASE_FILE) except OSError: pass # Regenerate the database - init_db() + # 1. Create it + subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases']) + # 2. Set permissions on it + os.chown(config.DATABASE_FILE, uid, gid) + os.chmod(config.DATABASE_FILE, 0o0640) + + if os.environ.get('SECUREDROP_ENV') == 'dev': + # 3. Create the DB from the metadata directly when in 'dev' so + # developers can test application changes without first writing + # alembic migration. + with context or app_context(): + db.create_all() + else: + # We have to override the hardcoded .ini file because during testing + # the value in the .ini doesn't exist. + ini_dir = os.path.dirname(getattr(config, + 'TEST_ALEMBIC_INI', + 'alembic.ini')) + + # 3. Migrate it to 'head' + subprocess.check_call('cd {} && alembic upgrade head'.format(ini_dir), + shell=True) # nosec # Clear submission/reply storage try: - os.stat(config.STORE_DIR) + os.stat(args.store_dir) except OSError: pass else: - for source_dir in os.listdir(config.STORE_DIR): + for source_dir in os.listdir(args.store_dir): try: # Each entry in STORE_DIR is a directory corresponding # to a source - shutil.rmtree(os.path.join(config.STORE_DIR, source_dir)) + shutil.rmtree(os.path.join(args.store_dir, source_dir)) except OSError: pass return 0 @@ -115,7 +126,7 @@ def add_journalist(args): def _get_username(): while True: - username = raw_input('Username: ') + username = obtain_input('Username: ') try: Journalist.check_username_acceptable(username) except InvalidUsernameException as e: @@ -124,22 +135,46 @@ def _get_username(): return username +def _get_first_name(): + while True: + first_name = obtain_input('First name: ') + if not first_name: + return None + try: + Journalist.check_name_acceptable(first_name) + return first_name + except FirstOrLastNameError as e: + print('Invalid name: ' + str(e)) + + +def _get_last_name(): + while True: + last_name = obtain_input('Last name: ') + if not last_name: + return None + try: + Journalist.check_name_acceptable(last_name) + return last_name + except FirstOrLastNameError as e: + print('Invalid name: ' + str(e)) + + def _get_yubikey_usage(): '''Function used to allow for test suite mocking''' while True: - answer = raw_input('Will this user be using a YubiKey [HOTP]? ' - '(y/N): ').lower().strip() + answer = obtain_input('Will this user be using a YubiKey [HOTP]? ' + '(y/N): ').lower().strip() if answer in ('y', 'yes'): return True elif answer in ('', 'n', 'no'): return False else: - print 'Invalid answer. Please type "y" or "n"' + print('Invalid answer. 
Please type "y" or "n"') def _make_password(): while True: - password = crypto_util.genrandomid(7) + password = current_app.crypto_util.genrandomid(7) try: Journalist.check_password_acceptable(password) return password @@ -147,73 +182,78 @@ def _make_password(): continue -def _add_user(is_admin=False): - username = _get_username() - - print("Note: Passwords are now autogenerated.") - password = _make_password() - print("This user's password is: {}".format(password)) - - is_hotp = _get_yubikey_usage() - otp_secret = None - if is_hotp: - while True: - otp_secret = raw_input( - "Please configure this user's YubiKey and enter the secret: ") - if otp_secret: - tmp_str = otp_secret.replace(" ", "") - if len(tmp_str) != 40: - print("The length of the secret is not correct. " - "Expected 40 characters, but received {0}. " - "Try again.".format(len(tmp_str))) - continue - if otp_secret: - break +def _add_user(is_admin=False, context=None): + with context or app_context(): + username = _get_username() + first_name = _get_first_name() + last_name = _get_last_name() + + print("Note: Passwords are now autogenerated.") + password = _make_password() + print("This user's password is: {}".format(password)) + + is_hotp = _get_yubikey_usage() + otp_secret = None + if is_hotp: + while True: + otp_secret = obtain_input( + "Please configure this user's YubiKey and enter the " + "secret: ") + if otp_secret: + tmp_str = otp_secret.replace(" ", "") + if len(tmp_str) != 40: + print("The length of the secret is not correct. " + "Expected 40 characters, but received {0}. " + "Try again.".format(len(tmp_str))) + continue + if otp_secret: + break - try: - user = Journalist(username=username, - password=password, - is_admin=is_admin, - otp_secret=otp_secret) - db_session.add(user) - db_session.commit() - except Exception as exc: - db_session.rollback() - if "UNIQUE constraint failed: journalists.username" in str(exc): - print('ERROR: That username is already taken!') + try: + user = Journalist(username=username, + first_name=first_name, + last_name=last_name, + password=password, + is_admin=is_admin, + otp_secret=otp_secret) + db.session.add(user) + db.session.commit() + except Exception as exc: + db.session.rollback() + if "UNIQUE constraint failed: journalists.username" in str(exc): + print('ERROR: That username is already taken!') + else: + exc_type, exc_value, exc_traceback = sys.exc_info() + print(repr(traceback.format_exception(exc_type, exc_value, + exc_traceback))) + return 1 else: - exc_type, exc_value, exc_traceback = sys.exc_info() - print(repr(traceback.format_exception(exc_type, exc_value, - exc_traceback))) - return 1 - else: - print('User "{}" successfully added'.format(username)) - if not otp_secret: - # Print the QR code for FreeOTP - print('\nScan the QR code below with FreeOTP:\n') - uri = user.totp.provisioning_uri(username, - issuer_name='SecureDrop') - qr = qrcode.QRCode() - qr.add_data(uri) - sys.stdout = codecs.getwriter("utf-8")(sys.stdout) - qr.print_ascii(tty=sys.stdout.isatty()) - print('\nIf the barcode does not render correctly, try changing ' - "your terminal's font (Monospace for Linux, Menlo for OS " - 'X). If you are using iTerm on Mac OS X, you will need to ' - 'change the "Non-ASCII Font", which is your profile\'s Text ' - "settings.\n\nCan't scan the barcode? 
Enter following " - 'shared secret ' - 'manually:\n{}\n'.format(user.formatted_otp_secret)) + print('User "{}" successfully added'.format(username)) + if not otp_secret: + # Print the QR code for FreeOTP + print('\nScan the QR code below with FreeOTP:\n') + uri = user.totp.provisioning_uri(username, + issuer_name='SecureDrop') + qr = qrcode.QRCode() + qr.add_data(uri) + qr.print_ascii(tty=sys.stdout.isatty()) + print('\nIf the barcode does not render correctly, try ' + "changing your terminal's font (Monospace for Linux, " + 'Menlo for OS X). If you are using iTerm on Mac OS X, ' + 'you will need to change the "Non-ASCII Font", which ' + "is your profile\'s Text settings.\n\nCan't scan the " + 'barcode? Enter following shared secret manually:' + '\n{}\n'.format(user.formatted_otp_secret)) return 0 def _get_username_to_delete(): - return raw_input('Username to delete: ') + return obtain_input('Username to delete: ') def _get_delete_confirmation(user): - confirmation = raw_input('Are you sure you want to delete user ' - '"{}" (y/n)?'.format(user)) + confirmation = obtain_input('Are you sure you want to delete user ' + '"{}" (y/n)?'.format(user)) if confirmation.lower() != 'y': print('Confirmation not received: user "{}" was NOT ' 'deleted'.format(user)) @@ -221,40 +261,41 @@ def _get_delete_confirmation(user): return True -def delete_user(args): - """Deletes a journalist or administrator from the application.""" - username = _get_username_to_delete() - try: - selected_user = Journalist.query.filter_by(username=username).one() - except NoResultFound: - print('ERROR: That user was not found!') - return 0 +def delete_user(args, context=None): + """Deletes a journalist or admin from the application.""" + with context or app_context(): + username = _get_username_to_delete() + try: + selected_user = Journalist.query.filter_by(username=username).one() + except NoResultFound: + print('ERROR: That user was not found!') + return 0 - # Confirm deletion if user is found - if not _get_delete_confirmation(selected_user.username): - return 0 + # Confirm deletion if user is found + if not _get_delete_confirmation(selected_user.username): + return 0 - # Try to delete user from the database - try: - db_session.delete(selected_user) - db_session.commit() - except Exception as e: - # If the user was deleted between the user selection and confirmation, - # (e.g., through the web app), we don't report any errors. If the user - # is still there, but there was a error deleting them from the - # database, we do report it. + # Try to delete user from the database try: - Journalist.query.filter_by(username=username).one() - except NoResultFound: - pass - else: - raise e + db.session.delete(selected_user) + db.session.commit() + except Exception as e: + # If the user was deleted between the user selection and + # confirmation, (e.g., through the web app), we don't report any + # errors. If the user is still there, but there was a error + # deleting them from the database, we do report it. + try: + Journalist.query.filter_by(username=username).one() + except NoResultFound: + pass + else: + raise e - print('User "{}" successfully deleted'.format(username)) + print('User "{}" successfully deleted'.format(username)) return 0 -def clean_tmp(args): # pragma: no cover +def clean_tmp(args): """Cleanup the SecureDrop temp directory. 
""" if not os.path.exists(args.directory): log.debug('{} does not exist, do nothing'.format(args.directory)) @@ -275,121 +316,26 @@ def listdir_fullpath(d): return 0 -def translate_messages(args): - messages_file = os.path.join(args.translations_dir, 'messages.pot') - - if args.extract_update: - sh(""" - set -xe - - mkdir -p {translations_dir} - - pybabel extract \ - --charset=utf-8 \ - --mapping={mapping} \ - --output={messages_file} \ - --project=SecureDrop \ - --version={version} \ - --msgid-bugs-address='[email protected]' \ - --copyright-holder='Freedom of the Press Foundation' \ - {sources} - - # remove this line so the file does not change if no - # strings are modified - sed -i '/^"POT-Creation-Date/d' {messages_file} - """.format(translations_dir=args.translations_dir, - mapping=args.mapping, - messages_file=messages_file, - version=args.version, - sources=" ".join(args.source))) - - changed = subprocess.call("git diff --quiet {}".format(messages_file), - shell=True) - - if changed and len(os.listdir(args.translations_dir)) > 1: - sh(""" - set -xe - for translation in {translations_dir}/*/LC_MESSAGES/*.po ; do - msgmerge --previous --update $translation {messages_file} - done - """.format(translations_dir=args.translations_dir, - messages_file=messages_file)) - log.warning("messages translations updated in " + messages_file) - else: - log.warning("messages translations are already up to date") - - if args.compile and len(os.listdir(args.translations_dir)) > 1: - sh(""" - set -x - pybabel compile --directory {translations_dir} - """.format(translations_dir=args.translations_dir)) - - -def translate_desktop(args): - messages_file = os.path.join(args.translations_dir, 'desktop.pot') - - if args.extract_update: - sh(""" - set -xe - cd {translations_dir} - xgettext \ - --output=desktop.pot \ - --language=Desktop \ - --keyword \ - --keyword=Name \ - --package-version={version} \ - --msgid-bugs-address='[email protected]' \ - --copyright-holder='Freedom of the Press Foundation' \ - {sources} - - # remove this line so the file does not change if no - # strings are modified - sed -i '/^"POT-Creation-Date/d' {messages_file} - """.format(translations_dir=args.translations_dir, - messages_file=messages_file, - version=args.version, - sources=" ".join(args.source))) - - changed = subprocess.call("git diff --quiet {}".format(messages_file), - shell=True) - - if changed: - for f in os.listdir(args.translations_dir): - if not f.endswith('.po'): - continue - po_file = os.path.join(args.translations_dir, f) - sh(""" - msgmerge --update {po_file} {messages_file} - """.format(po_file=po_file, - messages_file=messages_file)) - log.warning("messages translations updated in " + messages_file) - else: - log.warning("desktop translations are already up to date") - - if args.compile: - sh(""" - set -ex - cd {translations_dir} - find *.po | sed -e 's/\.po$//' > LINGUAS - for source in {sources} ; do - target=$(basename $source .in) - msgfmt --desktop --template $source -o $target -d . 
- done - """.format(translations_dir=args.translations_dir, - sources=" ".join(args.source))) +def init_db(args): + user = pwd.getpwnam(args.user) + subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases']) + os.chown(config.DATABASE_FILE, user.pw_uid, user.pw_gid) + os.chmod(config.DATABASE_FILE, 0o0640) + subprocess.check_call(['alembic', 'upgrade', 'head']) def get_args(): parser = argparse.ArgumentParser(prog=__file__, description='Management ' 'and testing utility for SecureDrop.') parser.add_argument('-v', '--verbose', action='store_true') + parser.add_argument('--data-root', + default=config.SECUREDROP_DATA_ROOT, + help=('directory in which the securedrop ' + 'data is stored')) + parser.add_argument('--store-dir', + default=config.STORE_DIR, + help=('directory in which the documents are stored')) subps = parser.add_subparsers() - # Run WSGI app - run_subp = subps.add_parser('run', help='Run the Werkzeug source & ' - 'journalist WSGI apps. WARNING!!! For ' - 'development only, not to be used in ' - 'production.') - run_subp.set_defaults(func=run) # Add/remove journalists + admins admin_subp = subps.add_parser('add-admin', help='Add an admin to the ' 'application.') @@ -407,64 +353,37 @@ def get_args(): delete_user_subp_a = subps.add_parser('delete_user', help='^') delete_user_subp_a.set_defaults(func=delete_user) - # Reset application state - reset_subp = subps.add_parser('reset', help='DANGER!!! Clears the ' - "SecureDrop application's state.") - reset_subp.set_defaults(func=reset) + add_check_db_disconnect_parser(subps) + add_check_fs_disconnect_parser(subps) + add_delete_db_disconnect_parser(subps) + add_delete_fs_disconnect_parser(subps) + add_list_db_disconnect_parser(subps) + add_list_fs_disconnect_parser(subps) + # Cleanup the SD temp dir set_clean_tmp_parser(subps, 'clean-tmp') set_clean_tmp_parser(subps, 'clean_tmp') - set_translate_messages_parser(subps) - set_translate_desktop_parser(subps) + init_db_subp = subps.add_parser('init-db', help='Initialize the database.\n') + init_db_subp.add_argument('-u', '--user', + help='Unix user for the DB', + required=True) + init_db_subp.set_defaults(func=init_db) - return parser + add_were_there_submissions_today(subps) + # Run WSGI app + run_subp = subps.add_parser('run', help='DANGER!!! ONLY FOR DEVELOPMENT ' + 'USE. DO NOT USE IN PRODUCTION. 
Run the ' + 'Werkzeug source and journalist WSGI apps.\n') + run_subp.set_defaults(func=run) -def set_translate_parser(subps, - parser, - translations_dir, - sources): - parser.add_argument( - '--extract-update', - action='store_true', - help='extract strings to translate and update existing translations') - parser.add_argument( - '--compile', - action='store_true', - help='compile translations') - parser.add_argument( - '--translations-dir', - default=translations_dir, - help='Base directory for translation files (default {})'.format( - translations_dir)) - parser.add_argument( - '--version', - default=version.__version__, - help='SecureDrop version to store in pot files (default {})'.format( - version.__version__)) - parser.add_argument( - '--source', - default=sources, - action='append', - help='Source files and directories to extract (default {})'.format( - sources)) - - -def set_translate_messages_parser(subps): - parser = subps.add_parser('translate-messages', - help=('Update and compile ' - 'source and template translations')) - translations_dir = join(dirname(realpath(__file__)), 'translations') - sources = ['.', 'source_templates', 'journalist_templates'] - set_translate_parser(subps, parser, translations_dir, sources) - mapping = 'babel.cfg' - parser.add_argument( - '--mapping', - default=mapping, - help='Mapping of files to consider (default {})'.format( - mapping)) - parser.set_defaults(func=translate_messages) + # Reset application state + reset_subp = subps.add_parser('reset', help='DANGER!!! ONLY FOR DEVELOPMENT ' + 'USE. DO NOT USE IN PRODUCTION. Clear the ' + 'SecureDrop application\'s state.\n') + reset_subp.set_defaults(func=reset) + return parser def set_clean_tmp_parser(subps, name): @@ -485,18 +404,6 @@ def set_clean_tmp_parser(subps, name): parser.set_defaults(func=clean_tmp) -def set_translate_desktop_parser(subps): - parser = subps.add_parser('translate-desktop', - help=('Update and compile ' - 'desktop icons translations')) - translations_dir = join( - dirname(realpath(__file__)), - '../install_files/ansible-base/roles/tails-config/templates') - sources = ['desktop-journalist-icon.j2.in', 'desktop-source-icon.j2.in'] - set_translate_parser(subps, parser, translations_dir, sources) - parser.set_defaults(func=translate_desktop) - - def setup_verbosity(args): if args.verbose: logging.getLogger(__name__).setLevel(logging.DEBUG) @@ -506,10 +413,15 @@ def setup_verbosity(args): def _run_from_commandline(): # pragma: no cover try: - args = get_args().parse_args() + parser = get_args() + args = parser.parse_args() setup_verbosity(args) - rc = args.func(args) - sys.exit(rc) + try: + rc = args.func(args) + sys.exit(rc) + except AttributeError: + parser.print_help() + parser.exit() except KeyboardInterrupt: sys.exit(signal.SIGINT) diff --git a/securedrop/management/__init__.py b/securedrop/management/__init__.py --- a/securedrop/management/__init__.py +++ b/securedrop/management/__init__.py @@ -0,0 +1,10 @@ +from contextlib import contextmanager + +import journalist_app +from sdconfig import config + + +@contextmanager +def app_context(): + with journalist_app.create_app(config).app_context(): + yield diff --git a/securedrop/management/run.py b/securedrop/management/run.py --- a/securedrop/management/run.py +++ b/securedrop/management/run.py @@ -88,16 +88,6 @@ def __init__(self, proc_funcs): def monitor(self): while True: - # TODO: we currently don't handle input, which makes using an - # interactive debugger like pdb impossible. 
Since Flask provides - # a featureful in-browser debugger, I'll accept that pdb is - # broken for now. If someone really wants it, they should be - # able to change this function to make it work (although I'm not - # sure how hard that would be). - # - # If you really want to use pdb, you can just run the - # application scripts individually (`python source.py` or - # `python journalist.py`). rprocs, _, _ = select.select(self.procs, [], []) for proc in rprocs: @@ -109,7 +99,7 @@ def monitor(self): self.last_proc = proc line = proc.stdout.readline() - sys.stdout.write(line) + sys.stdout.write(line.decode('utf-8')) sys.stdout.flush() if any(proc.poll() is not None for proc in self.procs): @@ -153,8 +143,7 @@ def run(args): # pragma: no cover * https://stackoverflow.com/q/22565606/837471 """ - print \ -""" + print(""" ____ ____ /\\ _`\\ /\\ _`\\ \\ \\,\\L\\_\\ __ ___ __ __ _ __ __\\ \\ \\/\\ \\ _ __ ___ _____ @@ -164,13 +153,13 @@ def run(args): # pragma: no cover \\/_____/\\/____/\\/____/ \\/___/ \\/_/ \\/____/ \\/___/ \\/_/ \\/___/ \\ \\ \\/ \\ \\_\\ \\/_/ -""" # noqa +""") # noqa procs = [ lambda: DevServerProcess('Source Interface', ['python', 'source.py'], 'blue'), - lambda: DevServerProcess('Document Interface', + lambda: DevServerProcess('Journalist Interface', ['python', 'journalist.py'], 'cyan'), lambda: DevServerProcess('SASS Compiler', diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py new file mode 100644 --- /dev/null +++ b/securedrop/management/submissions.py @@ -0,0 +1,245 @@ +from __future__ import print_function + +import datetime +import os +import sys +import time + +from db import db +from rm import secure_delete +from models import Reply, Source, Submission +from management import app_context + + +def find_disconnected_db_submissions(path): + """ + Finds Submission records whose file does not exist. + """ + submissions = db.session.query(Submission).order_by(Submission.id, Submission.filename).all() + + files_in_fs = {} + for directory, subdirs, files in os.walk(path): + for f in files: + files_in_fs[f] = os.path.abspath(os.path.join(directory, f)) + + disconnected_submissions = [s for s in submissions if s.filename not in files_in_fs] + + return disconnected_submissions + + +def check_for_disconnected_db_submissions(args): + """ + Check for Submission records whose files are missing. + """ + with app_context(): + disconnected = find_disconnected_db_submissions(args.store_dir) + if disconnected: + print( + "There are submissions in the database with no corresponding files. " + 'Run "manage.py list-disconnected-db-submissions" for details.' + ) + else: + print("No problems were found. All submissions' files are present.") + + +def list_disconnected_db_submissions(args): + """ + List the IDs of Submission records whose files are missing. + """ + with app_context(): + disconnected_submissions = find_disconnected_db_submissions(args.store_dir) + if disconnected_submissions: + print( + 'Run "manage.py delete-disconnected-db-submissions" to delete these records.', + file=sys.stderr, + ) + for s in disconnected_submissions: + print(s.id) + + +def delete_disconnected_db_submissions(args): + """ + Delete Submission records whose files are missing. 
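+    Prompts for confirmation unless --force was given.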
+ """ + with app_context(): + disconnected_submissions = find_disconnected_db_submissions(args.store_dir) + ids = [s.id for s in disconnected_submissions] + + remove = args.force + if not args.force: + remove = input("Enter 'y' to delete all submissions missing files: ") == "y" + if remove: + print("Removing submission IDs {}...".format(ids)) + db.session.query(Submission).filter(Submission.id.in_(ids)).delete( + synchronize_session="fetch" + ) + db.session.commit() + else: + print("Not removing disconnected submissions in database.") + + +def find_disconnected_fs_submissions(path): + """ + Finds files in the store that lack a Submission or Reply record. + """ + submissions = Submission.query.order_by(Submission.id, Submission.filename).all() + files_in_db = {s.filename: True for s in submissions} + + replies = Reply.query.order_by(Reply.id, Reply.filename).all() + files_in_db.update({r.filename: True for r in replies}) + + files_in_fs = {} + for directory, subdirs, files in os.walk(path): + for f in files: + files_in_fs[f] = os.path.abspath(os.path.join(directory, f)) + + disconnected_files = [] + for f, p in files_in_fs.items(): + if f not in files_in_db: + filesize = os.stat(p).st_size + disconnected_files.append((p, filesize)) + + disconnected_files = [t[0] for t in sorted(disconnected_files, key=lambda t: t[1])] + + return disconnected_files + + +def check_for_disconnected_fs_submissions(args): + """ + Check for files without a corresponding Submission or Reply record in the database. + """ + with app_context(): + disconnected = find_disconnected_fs_submissions(args.store_dir) + if disconnected: + print( + "There are files in the submission area with no corresponding records in the " + 'database. Run "manage.py list-disconnected-fs-submissions" for details.' + ) + else: + print("No unexpected files were found in the store.") + + +def list_disconnected_fs_submissions(args): + """ + List files without a corresponding Submission or Reply record in the database. + """ + with app_context(): + disconnected_files = find_disconnected_fs_submissions(args.store_dir) + if disconnected_files: + print( + 'Run "manage.py delete-disconnected-fs-submissions" to delete these files.', + file=sys.stderr, + ) + for f in disconnected_files: + print(f) + + +def delete_disconnected_fs_submissions(args): + """ + Delete files without a corresponding Submission record in the database. 
+ """ + with app_context(): + disconnected_files = find_disconnected_fs_submissions(args.store_dir) + bytes_deleted = 0 + time_elapsed = 0.0 + rate = 1.0 + filecount = len(disconnected_files) + eta = 1.0 + eta_msg = "" + for i, f in enumerate(disconnected_files, 1): + remove = args.force + if not args.force: + remove = input("Enter 'y' to delete {}: ".format(f)) == "y" + if remove: + filesize = os.stat(f).st_size + if i > 1: + eta = filesize / rate + eta_msg = " (ETA to remove {:d} bytes: {:.0f}s )".format(filesize, eta) + print("Securely removing file {}/{} {}{}...".format(i, filecount, f, eta_msg)) + start = time.time() + secure_delete(f) + file_elapsed = time.time() - start + bytes_deleted += filesize + time_elapsed += file_elapsed + rate = bytes_deleted / time_elapsed + print( + "elapsed: {:.2f}s rate: {:.1f} MB/s overall rate: {:.1f} MB/s".format( + file_elapsed, filesize / 1048576 / file_elapsed, rate / 1048576 + ) + ) + else: + print("Not removing {}.".format(f)) + + +def were_there_submissions_today(args, context=None): + with context or app_context(): + something = ( + db.session.query(Source) + .filter(Source.last_updated > datetime.datetime.utcnow() - datetime.timedelta(hours=24)) + .count() + > 0 + ) + count_file = os.path.join(args.data_root, "submissions_today.txt") + open(count_file, "w").write(something and "1" or "0") + + +def add_check_db_disconnect_parser(subps): + check_db_disconnect_subp = subps.add_parser( + "check-disconnected-db-submissions", + help="Check for submissions that exist in the database but not the filesystem.", + ) + check_db_disconnect_subp.set_defaults(func=check_for_disconnected_db_submissions) + + +def add_check_fs_disconnect_parser(subps): + check_fs_disconnect_subp = subps.add_parser( + "check-disconnected-fs-submissions", + help="Check for submissions that exist in the filesystem but not in the database.", + ) + check_fs_disconnect_subp.set_defaults(func=check_for_disconnected_fs_submissions) + + +def add_delete_db_disconnect_parser(subps): + delete_db_disconnect_subp = subps.add_parser( + "delete-disconnected-db-submissions", + help="Delete submissions that exist in the database but not the filesystem.", + ) + delete_db_disconnect_subp.set_defaults(func=delete_disconnected_db_submissions) + delete_db_disconnect_subp.add_argument( + "--force", action="store_true", help="Do not ask for confirmation." + ) + + +def add_delete_fs_disconnect_parser(subps): + delete_fs_disconnect_subp = subps.add_parser( + "delete-disconnected-fs-submissions", + help="Delete submissions that exist in the filesystem but not the database.", + ) + delete_fs_disconnect_subp.set_defaults(func=delete_disconnected_fs_submissions) + delete_fs_disconnect_subp.add_argument( + "--force", action="store_true", help="Do not ask for confirmation." 
+    )
+
+
+def add_list_db_disconnect_parser(subps):
+    list_db_disconnect_subp = subps.add_parser(
+        "list-disconnected-db-submissions",
+        help="List submissions that exist in the database but not the filesystem.",
+    )
+    list_db_disconnect_subp.set_defaults(func=list_disconnected_db_submissions)
+
+
+def add_list_fs_disconnect_parser(subps):
+    list_fs_disconnect_subp = subps.add_parser(
+        "list-disconnected-fs-submissions",
+        help="List submissions that exist in the filesystem but not the database.",
+    )
+    list_fs_disconnect_subp.set_defaults(func=list_disconnected_fs_submissions)
+
+
+def add_were_there_submissions_today(subps):
+    parser = subps.add_parser(
+        "were-there-submissions-today",
+        help=("Update the file indicating " "whether submissions were received in the past 24h."),
+    )
+    parser.set_defaults(func=were_there_submissions_today)
diff --git a/securedrop/models.py b/securedrop/models.py
new file mode 100644
--- /dev/null
+++ b/securedrop/models.py
@@ -0,0 +1,806 @@
+# -*- coding: utf-8 -*-
+import binascii
+import datetime
+import base64
+import os
+import scrypt
+import pyotp
+import qrcode
+# Using svg because it doesn't require additional dependencies
+import qrcode.image.svg
+import uuid
+from io import BytesIO
+
+from flask import current_app, url_for
+from itsdangerous import TimedJSONWebSignatureSerializer, BadData
+from jinja2 import Markup
+from passlib.hash import argon2
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, LargeBinary
+from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
+
+from db import db
+
+import typing
+
+if typing.TYPE_CHECKING:
+    # flake8 cannot understand type annotations yet.
+    # That is why all type-annotation-related import
+    # statements have to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 + from typing import Callable, Optional, Union, Dict, List, Any # noqa: F401 + from io import BufferedIOBase # noqa: F401 + from logging import Logger # noqa: F401 + from sqlalchemy import Query # noqa: F401 + from pyotp import OTP # noqa: F401 + +LOGIN_HARDENING = True +if os.environ.get('SECUREDROP_ENV') == 'test': + LOGIN_HARDENING = False + +ARGON2_PARAMS = dict(memory_cost=2**16, rounds=4, parallelism=2) + + +def get_one_or_else(query, logger, failure_method): + # type: (Query, Logger, Callable[[int], None]) -> None + try: + return query.one() + except MultipleResultsFound as e: + logger.error( + "Found multiple while executing %s when one was expected: %s" % + (query, e, )) + failure_method(500) + except NoResultFound as e: + logger.error("Found none when one was expected: %s" % (e,)) + failure_method(404) + + +class Source(db.Model): + __tablename__ = 'sources' + id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) + filesystem_id = Column(String(96), unique=True) + journalist_designation = Column(String(255), nullable=False) + flagged = Column(Boolean, default=False) + last_updated = Column(DateTime) + star = relationship("SourceStar", uselist=False, backref="source") + + # sources are "pending" and don't get displayed to journalists until they + # submit something + pending = Column(Boolean, default=True) + + # keep track of how many interactions have happened, for filenames + interaction_count = Column(Integer, default=0, nullable=False) + + # Don't create or bother checking excessively long codenames to prevent DoS + NUM_WORDS = 7 + MAX_CODENAME_LEN = 128 + + def __init__(self, filesystem_id=None, journalist_designation=None): + # type: (str, str) -> None + self.filesystem_id = filesystem_id + self.journalist_designation = journalist_designation + self.uuid = str(uuid.uuid4()) + + def __repr__(self): + # type: () -> str + return '<Source %r>' % (self.journalist_designation) + + @property + def journalist_filename(self): + # type: () -> str + valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_' + return ''.join([c for c in self.journalist_designation.lower().replace( + ' ', '_') if c in valid_chars]) + + def documents_messages_count(self): + # type: () -> Dict[str, int] + self.docs_msgs_count = {'messages': 0, 'documents': 0} + for submission in self.submissions: + if submission.filename.endswith('msg.gpg'): + self.docs_msgs_count['messages'] += 1 + elif (submission.filename.endswith('doc.gz.gpg') or + submission.filename.endswith('doc.zip.gpg')): + self.docs_msgs_count['documents'] += 1 + return self.docs_msgs_count + + @property + def collection(self): + # type: () -> List[Union[Submission, Reply]] + """Return the list of submissions and replies for this source, sorted + in ascending order by the filename/interaction count.""" + collection = [] # type: List[Union[Submission, Reply]] + collection.extend(self.submissions) + collection.extend(self.replies) + collection.sort(key=lambda x: int(x.filename.split('-')[0])) + return collection + + @property + def fingerprint(self): + return current_app.crypto_util.getkey(self.filesystem_id) + + @fingerprint.setter + def fingerprint(self, value): + raise NotImplementedError + + @fingerprint.deleter + def fingerprint(self): + raise NotImplementedError + + @property + def public_key(self): + # type: () -> str + return current_app.crypto_util.export_pubkey(self.filesystem_id) + + @public_key.setter + def public_key(self, 
value): + # type: (str) -> None + raise NotImplementedError + + @public_key.deleter + def public_key(self): + # type: () -> None + raise NotImplementedError + + def to_json(self): + # type: () -> Dict[str, Union[str, bool, int, str]] + docs_msg_count = self.documents_messages_count() + + if self.last_updated: + last_updated = self.last_updated.isoformat() + 'Z' + else: + last_updated = datetime.datetime.utcnow().isoformat() + 'Z' + + if self.star and self.star.starred: + starred = True + else: + starred = False + + json_source = { + 'uuid': self.uuid, + 'url': url_for('api.single_source', source_uuid=self.uuid), + 'journalist_designation': self.journalist_designation, + 'is_flagged': self.flagged, + 'is_starred': starred, + 'last_updated': last_updated, + 'interaction_count': self.interaction_count, + 'key': { + 'type': 'PGP', + 'public': self.public_key, + 'fingerprint': self.fingerprint + }, + 'number_of_documents': docs_msg_count['documents'], + 'number_of_messages': docs_msg_count['messages'], + 'submissions_url': url_for('api.all_source_submissions', + source_uuid=self.uuid), + 'add_star_url': url_for('api.add_star', source_uuid=self.uuid), + 'remove_star_url': url_for('api.remove_star', + source_uuid=self.uuid), + 'replies_url': url_for('api.all_source_replies', + source_uuid=self.uuid) + } + return json_source + + +class Submission(db.Model): + __tablename__ = 'submissions' + id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) + source_id = Column(Integer, ForeignKey('sources.id')) + source = relationship( + "Source", + backref=backref("submissions", order_by=id, cascade="delete") + ) + + filename = Column(String(255), nullable=False) + size = Column(Integer, nullable=False) + downloaded = Column(Boolean, default=False) + ''' + The checksum of the encrypted file on disk. + Format: $hash_name:$hex_encoded_hash_value + Example: sha256:05fa5efd7d1b608ac1fbdf19a61a5a439d05b05225e81faa63fdd188296b614a + ''' + checksum = Column(String(255)) + + def __init__(self, source, filename): + # type: (Source, str) -> None + self.source_id = source.id + self.filename = filename + self.uuid = str(uuid.uuid4()) + self.size = os.stat(current_app.storage.path(source.filesystem_id, + filename)).st_size + + def __repr__(self): + # type: () -> str + return '<Submission %r>' % (self.filename) + + def to_json(self): + # type: () -> Dict[str, Union[str, int, bool]] + json_submission = { + 'source_url': url_for('api.single_source', + source_uuid=self.source.uuid), + 'submission_url': url_for('api.single_submission', + source_uuid=self.source.uuid, + submission_uuid=self.uuid), + 'filename': self.filename, + 'size': self.size, + 'is_read': self.downloaded, + 'uuid': self.uuid, + 'download_url': url_for('api.download_submission', + source_uuid=self.source.uuid, + submission_uuid=self.uuid), + } + return json_submission + + +class Reply(db.Model): + __tablename__ = "replies" + id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) + + journalist_id = Column(Integer, ForeignKey('journalists.id')) + journalist = relationship( + "Journalist", + backref=backref( + 'replies', + order_by=id)) + + source_id = Column(Integer, ForeignKey('sources.id')) + source = relationship( + "Source", + backref=backref("replies", order_by=id, cascade="delete") + ) + + filename = Column(String(255), nullable=False) + size = Column(Integer, nullable=False) + ''' + The checksum of the encrypted file on disk. 
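+    Populated via store.async_add_checksum_for_file when a reply is saved,
+    or filled in lazily on first API download if missing.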
+    Format: $hash_name:$hex_encoded_hash_value
+    Example: sha256:05fa5efd7d1b608ac1fbdf19a61a5a439d05b05225e81faa63fdd188296b614a
+    '''
+    checksum = Column(String(255))
+
+    deleted_by_source = Column(Boolean, default=False, nullable=False)
+
+    def __init__(self, journalist, source, filename):
+        # type: (Journalist, Source, str) -> None
+        self.journalist_id = journalist.id
+        self.source_id = source.id
+        self.uuid = str(uuid.uuid4())
+        self.filename = filename
+        self.size = os.stat(current_app.storage.path(source.filesystem_id,
+                                                     filename)).st_size
+
+    def __repr__(self):
+        # type: () -> str
+        return '<Reply %r>' % (self.filename)
+
+    def to_json(self):
+        # type: () -> Dict[str, Union[str, int, bool]]
+        json_submission = {
+            'source_url': url_for('api.single_source',
+                                  source_uuid=self.source.uuid),
+            'reply_url': url_for('api.single_reply',
+                                 source_uuid=self.source.uuid,
+                                 reply_uuid=self.uuid),
+            'filename': self.filename,
+            'size': self.size,
+            'journalist_username': self.journalist.username,
+            'journalist_first_name': self.journalist.first_name,
+            'journalist_last_name': self.journalist.last_name,
+            'journalist_uuid': self.journalist.uuid,
+            'uuid': self.uuid,
+            'is_deleted_by_source': self.deleted_by_source,
+        }
+        return json_submission
+
+
+class SourceStar(db.Model):
+    __tablename__ = 'source_stars'
+    id = Column("id", Integer, primary_key=True)
+    source_id = Column("source_id", Integer, ForeignKey('sources.id'))
+    starred = Column("starred", Boolean, default=True)
+
+    def __eq__(self, other):
+        # type: (Any) -> bool
+        if isinstance(other, SourceStar):
+            return (self.source_id == other.source_id and
+                    self.id == other.id and self.starred == other.starred)
+        return False
+
+    def __init__(self, source, starred=True):
+        # type: (Source, bool) -> None
+        self.source_id = source.id
+        self.starred = starred
+
+
+class InvalidUsernameException(Exception):
+
+    """Raised when a user logs in with an invalid username"""
+
+
+class FirstOrLastNameError(Exception):
+    """Generic error for names that are invalid."""
+
+    def __init__(self, msg):
+        msg = 'Invalid first or last name.'
+        super(FirstOrLastNameError, self).__init__(msg)
+
+
+class InvalidNameLength(FirstOrLastNameError):
+    """Raised when attempting to create a Journalist with an invalid name length."""
+
+    def __init__(self, name):
+        self.name_len = len(name)
+        if self.name_len > Journalist.MAX_NAME_LEN:
+            msg = "Name too long (len={})".format(self.name_len)
+            super(InvalidNameLength, self).__init__(msg)
+
+
+class LoginThrottledException(Exception):
+
+    """Raised when a user attempts to log in
+    too many times in a given time period"""
+
+
+class WrongPasswordException(Exception):
+
+    """Raised when a user logs in with an incorrect password"""
+
+
+class BadTokenException(Exception):
+
+    """Raised when a user logs in with an incorrect TOTP token"""
+
+
+class PasswordError(Exception):
+
+    """Generic error for passwords that are invalid.
+    """
+
+
+class InvalidPasswordLength(PasswordError):
+    """Raised when attempting to create a Journalist or log in with an invalid
+    password length.
+ """ + + def __init__(self, passphrase): + # type: (str) -> None + self.passphrase_len = len(passphrase) + + def __str__(self): + # type: () -> str + if self.passphrase_len > Journalist.MAX_PASSWORD_LEN: + return "Password too long (len={})".format(self.passphrase_len) + if self.passphrase_len < Journalist.MIN_PASSWORD_LEN: + return "Password needs to be at least {} characters".format( + Journalist.MIN_PASSWORD_LEN + ) + return "" # return empty string that can be appended harmlessly + + +class NonDicewarePassword(PasswordError): + + """Raised when attempting to validate a password that is not diceware-like + """ + + +class Journalist(db.Model): + __tablename__ = "journalists" + id = Column(Integer, primary_key=True) + uuid = Column(String(36), unique=True, nullable=False) + username = Column(String(255), nullable=False, unique=True) + first_name = Column(String(255)) + last_name = Column(String(255)) + pw_salt = Column(LargeBinary(32)) + pw_hash = Column(LargeBinary(256)) + is_admin = Column(Boolean) + session_nonce = Column(Integer, nullable=False, default=0) + + otp_secret = Column(String(16), default=pyotp.random_base32) + is_totp = Column(Boolean, default=True) + hotp_counter = Column(Integer, default=0) + last_token = Column(String(6)) + + created_on = Column(DateTime, default=datetime.datetime.utcnow) + last_access = Column(DateTime) + passphrase_hash = Column(String(256)) + login_attempts = relationship( + "JournalistLoginAttempt", + backref="journalist") + + MIN_USERNAME_LEN = 3 + MIN_NAME_LEN = 0 + MAX_NAME_LEN = 100 + + def __init__(self, username, password, first_name=None, last_name=None, is_admin=False, + otp_secret=None): + # type: (str, str, Optional[str], Optional[str], bool, Optional[str]) -> None + + self.check_username_acceptable(username) + self.username = username + if first_name: + self.check_name_acceptable(first_name) + self.first_name = first_name + if last_name: + self.check_name_acceptable(last_name) + self.last_name = last_name + self.set_password(password) + self.is_admin = is_admin + self.uuid = str(uuid.uuid4()) + + if otp_secret: + self.set_hotp_secret(otp_secret) + + def __repr__(self): + # type: () -> str + return "<Journalist {0}{1}>".format( + self.username, + " [admin]" if self.is_admin else "") + + _LEGACY_SCRYPT_PARAMS = dict(N=2**14, r=8, p=1) + + def _scrypt_hash(self, password, salt): + # type: (str, str) -> str + return scrypt.hash(str(password), salt, **self._LEGACY_SCRYPT_PARAMS) + + MAX_PASSWORD_LEN = 128 + MIN_PASSWORD_LEN = 14 + + def set_password(self, passphrase): + # type: (str) -> None + self.check_password_acceptable(passphrase) + + # "migrate" from the legacy case + if not self.passphrase_hash: + self.passphrase_hash = \ + argon2.using(**ARGON2_PARAMS).hash(passphrase) + # passlib creates one merged field that embeds randomly generated + # salt in the output like $alg$salt$hash + self.pw_hash = None + self.pw_salt = None + + # Don't do anything if user's password hasn't changed. 
if self.passphrase_hash and self.valid_password(passphrase):
+ return
+
+ self.passphrase_hash = argon2.using(**ARGON2_PARAMS).hash(passphrase)
+
+ def set_name(self, first_name, last_name):
+ if first_name:
+ self.check_name_acceptable(first_name)
+ if last_name:
+ self.check_name_acceptable(last_name)
+ self.first_name = first_name
+ self.last_name = last_name
+
+ @classmethod
+ def check_username_acceptable(cls, username):
+ # type: (str) -> None
+ if len(username) < cls.MIN_USERNAME_LEN:
+ raise InvalidUsernameException(
+ 'Username "{}" must be at least {} characters long.'
+ .format(username, cls.MIN_USERNAME_LEN))
+
+ @classmethod
+ def check_name_acceptable(cls, name):
+ # Enforce a reasonable maximum length for names
+ if len(name) > cls.MAX_NAME_LEN:
+ raise InvalidNameLength(name)
+
+ @classmethod
+ def check_password_acceptable(cls, password):
+ # type: (str) -> None
+ # Enforce a reasonable maximum length for passwords to avoid DoS
+ if len(password) > cls.MAX_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
+
+ # Enforce a reasonable minimum length for new passwords
+ if len(password) < cls.MIN_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
+
+ # Ensure all passwords are "diceware-like"
+ if len(password.split()) < 7:
+ raise NonDicewarePassword()
+
+ def valid_password(self, passphrase):
+ # type: (str) -> bool
+ # Avoid hashing passwords that are over the maximum length
+ if len(passphrase) > self.MAX_PASSWORD_LEN:
+ raise InvalidPasswordLength(passphrase)
+
+ # No check on minimum password length here because some passwords
+ # may have been set prior to setting the minimum password length.
+
+ if self.passphrase_hash:
+ # default case
+ is_valid = argon2.verify(passphrase, self.passphrase_hash)
+ else:
+ # legacy support
+ is_valid = pyotp.utils.compare_digest(
+ self._scrypt_hash(passphrase, self.pw_salt),
+ self.pw_hash)
+
+ # migrate legacy passwords to the new argon2 scheme
+ if is_valid and not self.passphrase_hash:
+ self.passphrase_hash = \
+ argon2.using(**ARGON2_PARAMS).hash(passphrase)
+ # passlib creates one merged field that embeds randomly generated
+ # salt in the output like $alg$salt$hash
+ self.pw_salt = None
+ self.pw_hash = None
+ db.session.add(self)
+ db.session.commit()
+
+ return is_valid
+
+ def regenerate_totp_shared_secret(self):
+ # type: () -> None
+ self.otp_secret = pyotp.random_base32()
+
+ def set_hotp_secret(self, otp_secret):
+ # type: (str) -> None
+ self.otp_secret = base64.b32encode(
+ binascii.unhexlify(
+ otp_secret.replace(
+ " ",
+ "")))
+ self.is_totp = False
+ self.hotp_counter = 0
+
+ @property
+ def totp(self):
+ # type: () -> OTP
+ if self.is_totp:
+ return pyotp.TOTP(self.otp_secret)
+ else:
+ raise ValueError('{} is not using TOTP'.format(self))
+
+ @property
+ def hotp(self):
+ # type: () -> OTP
+ if not self.is_totp:
+ return pyotp.HOTP(self.otp_secret)
+ else:
+ raise ValueError('{} is not using HOTP'.format(self))
+
+ @property
+ def shared_secret_qrcode(self):
+ # type: () -> Markup
+ uri = self.totp.provisioning_uri(
+ self.username,
+ issuer_name="SecureDrop")
+
+ qr = qrcode.QRCode(
+ box_size=15,
+ image_factory=qrcode.image.svg.SvgPathImage
+ )
+ qr.add_data(uri)
+ img = qr.make_image()
+
+ svg_out = BytesIO()
+ img.save(svg_out)
+ return Markup(svg_out.getvalue().decode('utf-8'))
+
+ @property
+ def formatted_otp_secret(self):
+ # type: () -> str
+ """The OTP secret is easier to read and manually enter if it is all
+ lowercase and split into four groups of four characters.
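+ For example, an illustrative secret of 'ABCD2345EFGH2345' is rendered
+ as 'abcd 2345 efgh 2345'.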
The secret is + base32-encoded, so it is case insensitive.""" + sec = self.otp_secret + chunks = [sec[i:i + 4] for i in range(0, len(sec), 4)] + return ' '.join(chunks).lower() + + def _format_token(self, token): + # type: (str) -> str + """Strips from authentication tokens the whitespace + that many clients add for readability""" + return ''.join(token.split()) + + def verify_token(self, token): + # type: (str) -> bool + token = self._format_token(token) + + # Store latest token to prevent OTP token reuse + self.last_token = token + db.session.commit() + + if self.is_totp: + # Also check the given token against the previous and next + # valid tokens, to compensate for potential time skew + # between the client and the server. The total valid + # window is 1:30s. + return self.totp.verify(token, valid_window=1) + else: + for counter_val in range( + self.hotp_counter, + self.hotp_counter + 20): + if self.hotp.verify(token, counter_val): + self.hotp_counter = counter_val + 1 + db.session.commit() + return True + return False + + _LOGIN_ATTEMPT_PERIOD = 60 # seconds + _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5 + + @classmethod + def throttle_login(cls, user): + # type: (Journalist) -> None + # Record the login attempt... + login_attempt = JournalistLoginAttempt(user) + db.session.add(login_attempt) + db.session.commit() + + # ...and reject it if they have exceeded the threshold + login_attempt_period = datetime.datetime.utcnow() - \ + datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD) + attempts_within_period = JournalistLoginAttempt.query.filter( + JournalistLoginAttempt.journalist_id == user.id).filter( + JournalistLoginAttempt.timestamp > login_attempt_period).all() + if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD: + raise LoginThrottledException( + "throttled ({} attempts in last {} seconds)".format( + len(attempts_within_period), + cls._LOGIN_ATTEMPT_PERIOD)) + + @classmethod + def login(cls, username, password, token): + # type: (str, str, str) -> Journalist + try: + user = Journalist.query.filter_by(username=username).one() + except NoResultFound: + raise InvalidUsernameException( + "invalid username '{}'".format(username)) + + if LOGIN_HARDENING: + cls.throttle_login(user) + + # Prevent TOTP token reuse + if user.last_token is not None: + if pyotp.utils.compare_digest(token, user.last_token): + raise BadTokenException("previously used two-factor code " + "{}".format(token)) + if not user.verify_token(token): + raise BadTokenException("invalid two-factor code") + if not user.valid_password(password): + raise WrongPasswordException("invalid password") + return user + + def generate_api_token(self, expiration): + # type: (int) -> str + s = TimedJSONWebSignatureSerializer( + current_app.config['SECRET_KEY'], expires_in=expiration) + return s.dumps({'id': self.id}).decode('ascii') # type:ignore + + @staticmethod + def validate_token_is_not_expired_or_invalid(token): + s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY']) + try: + s.loads(token) + except BadData: + return None + + return True + + @staticmethod + def validate_api_token_and_get_user(token): + # type: (str) -> Union[Journalist, None] + s = TimedJSONWebSignatureSerializer(current_app.config['SECRET_KEY']) + try: + data = s.loads(token) + except BadData: + return None + + revoked_token = RevokedToken.query.filter_by(token=token).one_or_none() + if revoked_token is not None: + return None + + return Journalist.query.get(data['id']) + + def to_json(self): + # type: () -> Dict[str, Union[str, bool, 
str]] + json_user = { + 'username': self.username, + 'last_login': self.last_access.isoformat() + 'Z', + 'is_admin': self.is_admin, + 'uuid': self.uuid, + 'first_name': self.first_name, + 'last_name': self.last_name + } + return json_user + + +class JournalistLoginAttempt(db.Model): + + """This model keeps track of journalist's login attempts so we can + rate limit them in order to prevent attackers from brute forcing + passwords or two-factor tokens.""" + __tablename__ = "journalist_login_attempt" + id = Column(Integer, primary_key=True) + timestamp = Column(DateTime, default=datetime.datetime.utcnow) + journalist_id = Column(Integer, ForeignKey('journalists.id')) + + def __init__(self, journalist): + # type: (Journalist) -> None + self.journalist_id = journalist.id + + +class RevokedToken(db.Model): + + """ + API tokens that have been revoked either through a logout or other revocation mechanism. + """ + + __tablename__ = 'revoked_tokens' + + id = Column(Integer, primary_key=True) + journalist_id = Column(Integer, ForeignKey('journalists.id')) + token = db.Column(db.Text, nullable=False, unique=True) + + +class InstanceConfig(db.Model): + '''Versioned key-value store of settings configurable from the journalist + interface. The current version has valid_until=None. + ''' + + __tablename__ = 'instance_config' + version = Column(Integer, primary_key=True) + valid_until = Column(DateTime, default=None, unique=True) + + allow_document_uploads = Column(Boolean, default=True) + + # Columns not listed here will be included by InstanceConfig.copy() when + # updating the configuration. + metadata_cols = ['version', 'valid_until'] + + def __repr__(self): + return "<InstanceConfig(version=%s, valid_until=%s)>" % (self.version, self.valid_until) + + def copy(self): + '''Make a copy of only the configuration columns of the given + InstanceConfig object: i.e., excluding metadata_cols. + ''' + + new = type(self)() + for col in self.__table__.columns: + if col.name in self.metadata_cols: + continue + + setattr(new, col.name, getattr(self, col.name)) + + return new + + @classmethod + def get_current(cls): + '''If the database was created via db.create_all(), data migrations + weren't run, and the "instance_config" table is empty. In this case, + save and return a base configuration derived from each setting's + column-level default. + ''' + + try: + return cls.query.filter(cls.valid_until == None).one() # noqa: E711 + except NoResultFound: + current = cls() + db.session.add(current) + db.session.commit() + return current + + @classmethod + def set(cls, name, value): + '''Invalidate the current configuration and append a new one with the + requested change. 
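+ A sketch of the intended call pattern, using the one configurable
+ column above as the example:
+
+ InstanceConfig.set('allow_document_uploads', False)
+
+ after which get_current() returns the new row and the superseded row
+ keeps valid_until as its expiry timestamp.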
+ ''' + + old = cls.get_current() + old.valid_until = datetime.datetime.utcnow() + db.session.add(old) + + new = old.copy() + setattr(new, name, value) + db.session.add(new) + + db.session.commit() diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py new file mode 100755 --- /dev/null +++ b/securedrop/qa_loader.py @@ -0,0 +1,253 @@ +#!/opt/venvs/securedrop-app-code/bin/python +# -*- coding: utf-8 -*- + +import math +import os +import random +import string +import sys + +from argparse import ArgumentParser +from datetime import datetime +from flask import current_app +from os import path +from sqlalchemy import text + +from crypto_util import DICEWARE_SAFE_CHARS +from db import db +from journalist_app import create_app +from models import (Journalist, Source, Submission, SourceStar, Reply, + JournalistLoginAttempt) +from sdconfig import config as sdconfig + +random.seed('~(=^–^)') # mrow? + + +def random_bool(): + return bool(random.getrandbits(1)) + + +def random_chars(len, nullable, chars=string.printable): + if nullable and random_bool(): + return None + else: + return ''.join([random.choice(chars) for _ in range(len)]) + + +def bool_or_none(): + return random.choice([True, False, None]) + + +def random_datetime(nullable): + if nullable and random_bool(): + return None + else: + return datetime( + year=random.randint(1, 9999), + month=random.randint(1, 12), + day=random.randint(1, 28), + hour=random.randint(0, 23), + minute=random.randint(0, 59), + second=random.randint(0, 59), + microsecond=random.randint(0, 1000), + ) + + +def positive_int(s): + i = int(s) + if i < 1: + raise ValueError('{} is not >= 1'.format(s)) + return i + + +class QaLoader(object): + + JOURNALIST_COUNT = 10 + SOURCE_COUNT = 50 + + def __init__(self, config, multiplier): + self.config = config + self.app = create_app(config) + self.multiplier = multiplier + + self.journalists = [] + self.sources = [] + self.submissions = [] + + def new_journalist(self): + # Make a diceware-like password + pw = ' '.join( + [random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) + for _ in range(7)]) + journalist = Journalist(username=random_chars(random.randint(3, 32), nullable=False), + password=pw, + is_admin=random_bool()) + if random_bool(): + # to add legacy passwords back in + journalist.passphrase_hash = None + journalist.pw_salt = random_chars(32, nullable=False).encode('utf-8') + journalist.pw_hash = random_chars(64, nullable=False).encode('utf-8') + + journalist.is_admin = bool_or_none() + + journalist.is_totp = bool_or_none() + journalist.hotp_counter = (random.randint(-1000, 1000) + if random_bool() else None) + journalist.created_on = random_datetime(nullable=True) + journalist.last_access = random_datetime(nullable=True) + + db.session.add(journalist) + db.session.flush() + self.journalists.append(journalist.id) + + def new_source(self): + fid_len = random.randint(4, 32) + designation_len = random.randint(4, 32) + source = Source(random_chars(fid_len, nullable=False, + chars=string.ascii_lowercase), + random_chars(designation_len, nullable=False)) + source.flagged = bool_or_none() + source.last_updated = random_datetime(nullable=False) + source.pending = False + + db.session.add(source) + db.session.flush() + self.sources.append(source.id) + + def new_submission(self, source_id): + source = Source.query.get(source_id) + + # A source may have a null fid according to the DB, but this will + # break storage.path. 
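+ # (Storage.path() joins the filesystem_id into the on-disk path, so a
+ # None value would raise rather than resolve; such rows are skipped.)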
+ if source.filesystem_id is None: + return + + filename = self.fake_file(source.filesystem_id) + submission = Submission(source, filename) + + # For issue #1189 + if random_bool(): + submission.source_id = None + + submission.downloaded = bool_or_none() + + db.session.add(submission) + db.session.flush() + self.submissions.append(submission.id) + + def fake_file(self, source_fid): + source_dir = path.join(self.config.STORE_DIR, source_fid) + if not path.exists(source_dir): + os.mkdir(source_dir) + + filename = random_chars(20, + nullable=False, + chars=string.ascii_lowercase) + num = random.randint(0, 100) + msg_type = 'msg' if random_bool() else 'doc.gz' + filename = '{}-{}-{}.gpg'.format(num, filename, msg_type) + f_len = int(math.floor(random.expovariate(100000) * 1024 * 1024 * 500)) + sub_path = current_app.storage.path(source_fid, filename) + with open(sub_path, 'w') as f: + f.write('x' * f_len) + + return filename + + def new_source_star(self, source_id): + source = Source.query.get(source_id) + star = SourceStar(source, bool_or_none()) + db.session.add(star) + + def new_reply(self, journalist_id, source_id): + source = Source.query.get(source_id) + + # A source may have a null fid according to the DB, but this will + # break storage.path. + if source.filesystem_id is None: + return + + journalist = Journalist.query.get(journalist_id) + filename = self.fake_file(source.filesystem_id) + reply = Reply(journalist, source, filename) + db.session.add(reply) + + def new_journalist_login_attempt(self, journalist_id): + journalist = Journalist.query.get(journalist_id) + attempt = JournalistLoginAttempt(journalist) + attempt.timestamp = random_datetime(nullable=True) + db.session.add(attempt) + + def new_abandoned_submission(self, source_id): + '''For issue #1189''' + + source = Source.query.filter(Source.filesystem_id.isnot(None)).all()[0] + filename = self.fake_file(source.filesystem_id) + + # Use this as hack to create a real submission then swap out the + # source_id + submission = Submission(source, filename) + submission.source_id = source_id + db.session.add(submission) + db.session.commit() + self.delete_source(source_id) + + def delete_source(self, source_id): + '''For issue #1189''' + db.session.execute(text('DELETE FROM sources WHERE id = :source_id'), + {'source_id': source_id}) + + def load(self): + with self.app.app_context(): + for _ in range(self.JOURNALIST_COUNT * self.multiplier): + self.new_journalist() + db.session.commit() + + for _ in range(self.SOURCE_COUNT * self.multiplier): + self.new_source() + db.session.commit() + + for sid in self.sources[0::5]: + for _ in range(1, self.multiplier + 1): + self.new_submission(sid) + db.session.commit() + + for sid in self.sources[0::5]: + self.new_source_star(sid) + db.session.commit() + + for jid in self.journalists[0::10]: + for sid in self.sources[0::10]: + for _ in range(1, 3): + self.new_reply(jid, sid) + db.session.commit() + + for jid in self.journalists[0::10]: + self.new_journalist_login_attempt(jid) + db.session.commit() + + for sid in random.sample(self.sources, self.multiplier): + self.new_abandoned_submission(sid) + + +def arg_parser(): + parser = ArgumentParser( + path.basename(__file__), + description='Loads data into the database for testing upgrades') + parser.add_argument('-m', '--multiplier', type=positive_int, default=25, + help=('Factor to multiply the loaded data by ' + '(default 25)')) + return parser + + +def main(): + args = arg_parser().parse_args() + print('Loading data. 
This may take a while.')
+ QaLoader(sdconfig, args.multiplier).load()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ print('') # for prompt on a newline
+ sys.exit(1)
diff --git a/securedrop/request_that_secures_file_uploads.py b/securedrop/request_that_secures_file_uploads.py
--- a/securedrop/request_that_secures_file_uploads.py
+++ b/securedrop/request_that_secures_file_uploads.py
@@ -24,7 +24,7 @@ def _secure_file_stream(self, total_content_length, content_type,
# note in `config.py` for more info. Instead, we just use
# `/tmp`, which has the additional benefit of being
# automatically cleared on reboot.
- return SecureTemporaryFile('/tmp')
+ return SecureTemporaryFile('/tmp') # nosec
return BytesIO()
def make_form_data_parser(self):
diff --git a/securedrop/rm.py b/securedrop/rm.py
--- a/securedrop/rm.py
+++ b/securedrop/rm.py
@@ -16,9 +16,92 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
+import errno
+import logging
+import os
import subprocess
-def srm(fn):
- subprocess.check_call(['srm', '-r', fn])
- return "success"
+def shred(path, delete=True):
+ # type: (str, bool) -> None
+ """
+ Run shred on the file at the given path.
+
+ Args:
+ path (str): The path to the file to shred.
+ delete (bool): Whether to unlink the file after shredding.
+
+ Returns:
+ None
+
+ Raises:
+ subprocess.CalledProcessError: If shred's return code is not zero.
+ EnvironmentError: If shred is not available.
+ """
+
+ if not os.path.exists(path):
+ raise EnvironmentError(path)
+
+ if not os.path.isfile(path):
+ raise ValueError("The shred function only works on files.")
+ cmd = ["shred", "-z", "-n", "30"]
+ if delete:
+ cmd.append("-u")
+ cmd.append(path)
+ subprocess.check_call(cmd)
+
+
+def secure_delete(path):
+ # type: (str) -> None
+ """
+ Securely deletes the file or directory tree at ``path``.
+
+ Args:
+ path (str): The path to the file or directory to delete.
+
+ Returns:
+ None
+
+ Raises:
+ subprocess.CalledProcessError: If shred's return code is not zero.
+ EnvironmentError: If shred is not available.
+ """
+ path = os.path.abspath(path)
+
+ directories = []
+ targets = []
+ if not os.path.isdir(path):
+ targets.append(path)
+ else:
+ for directory, subdirs, files in os.walk(path):
+ directories.append(directory)
+ directories.extend([os.path.abspath(os.path.join(directory, s)) for s in subdirs])
+ for f in files:
+ targets.append(os.path.abspath(os.path.join(directory, f)))
+
+ for t in targets:
+ shred(t)
+
+ directories_to_remove = set(directories)
+ for d in reversed(sorted(directories_to_remove)):
+ os.rmdir(d)
+
+
+def check_secure_delete_capability():
+ # type: () -> bool
+ """
+ Checks the availability of the program we use for secure deletion.
+
+ Returns:
+ bool: True if the program is available, otherwise False.
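+
+ Both a missing binary (ENOENT) and one that exits non-zero yield
+ False; any other EnvironmentError is re-raised.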
+ """ + try: + subprocess.check_output(["shred", "--help"]) + return True + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + logging.error("The shred utility is missing.") + except subprocess.CalledProcessError as e: + logging.error("The shred utility is broken: %s %s", e, e.output) + return False diff --git a/securedrop/sdconfig.py b/securedrop/sdconfig.py new file mode 100644 --- /dev/null +++ b/securedrop/sdconfig.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- + +import config as _config + +import typing +# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking +if typing.TYPE_CHECKING: + # flake8 can not understand type annotation yet. + # That is why all type annotation relative import + # statements has to be marked as noqa. + # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 + from typing import List, Dict # noqa: F401 + + +class SDConfig(object): + def __init__(self): + # type: () -> None + try: + self.JournalistInterfaceFlaskConfig = \ + _config.JournalistInterfaceFlaskConfig # type: ignore + except AttributeError: + pass + + try: + self.SourceInterfaceFlaskConfig = \ + _config.SourceInterfaceFlaskConfig # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_FILE = _config.DATABASE_FILE # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_USERNAME = _config.DATABASE_USERNAME # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_PASSWORD = _config.DATABASE_PASSWORD # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_HOST = _config.DATABASE_HOST # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_NAME = _config.DATABASE_NAME # type: ignore + except AttributeError: + pass + + try: + self.ADJECTIVES = _config.ADJECTIVES # type: ignore + except AttributeError: + pass + + try: + self.DATABASE_ENGINE = _config.DATABASE_ENGINE # type: ignore + except AttributeError: + pass + + try: + self.DEFAULT_LOCALE = _config.DEFAULT_LOCALE # type: ignore + except AttributeError: + pass + + try: + self.GPG_KEY_DIR = _config.GPG_KEY_DIR # type: ignore + except AttributeError: + pass + + try: + self.JOURNALIST_KEY = _config.JOURNALIST_KEY # type: ignore + except AttributeError: + pass + + try: + self.JOURNALIST_TEMPLATES_DIR = _config.JOURNALIST_TEMPLATES_DIR # type: ignore # noqa: E501 + except AttributeError: + pass + + try: + self.NOUNS = _config.NOUNS # type: ignore + except AttributeError: + pass + + try: + self.SCRYPT_GPG_PEPPER = _config.SCRYPT_GPG_PEPPER # type: ignore + except AttributeError: + pass + + try: + self.SCRYPT_ID_PEPPER = _config.SCRYPT_ID_PEPPER # type: ignore + except AttributeError: + pass + + try: + self.SCRYPT_PARAMS = _config.SCRYPT_PARAMS # type: ignore + except AttributeError: + pass + + try: + self.SECUREDROP_DATA_ROOT = _config.SECUREDROP_DATA_ROOT # type: ignore # noqa: E501 + except AttributeError: + pass + + try: + self.SECUREDROP_ROOT = _config.SECUREDROP_ROOT # type: ignore + except AttributeError: + pass + + try: + self.SESSION_EXPIRATION_MINUTES = \ + _config.SESSION_EXPIRATION_MINUTES # type: ignore + except AttributeError: + pass + + try: + self.SOURCE_TEMPLATES_DIR = \ + _config.SOURCE_TEMPLATES_DIR # type: ignore + except AttributeError: + pass + + try: + self.STORE_DIR = _config.STORE_DIR # type: ignore + except AttributeError: + pass + + try: + self.SUPPORTED_LOCALES = \ + _config.SUPPORTED_LOCALES # type: ignore + except AttributeError: + pass + + try: + self.TEMP_DIR = _config.TEMP_DIR # type: ignore + except 
AttributeError: + pass + + try: + self.WORD_LIST = _config.WORD_LIST # type: ignore + except AttributeError: + pass + + try: + self.WORKER_PIDFILE = _config.WORKER_PIDFILE # type: ignore + except AttributeError: + pass + + try: + self.TRANSLATION_DIRS = _config.TRANSLATION_DIRS # type: ignore + except AttributeError: + pass + + try: + self.env = _config.env # type: ignore + except AttributeError: + pass + + if getattr(self, 'env', 'prod') == 'test': + self.RQ_WORKER_NAME = 'test' + else: + self.RQ_WORKER_NAME = 'default' + + +config = SDConfig() # type: SDConfig diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py --- a/securedrop/secure_tempfile.py +++ b/securedrop/secure_tempfile.py @@ -1,12 +1,15 @@ # -*- coding: utf-8 -*- import base64 import os -from tempfile import _TemporaryFileWrapper +import io +from tempfile import _TemporaryFileWrapper # type: ignore -from gnupg._util import _STREAMLIKE_TYPES -from Cryptodome.Cipher import AES -from Cryptodome.Random import random -from Cryptodome.Util import Counter +from pretty_bad_protocol._util import _STREAMLIKE_TYPES +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import CTR +from cryptography.hazmat.primitives.ciphers import Cipher class SecureTemporaryFile(_TemporaryFileWrapper, object): @@ -44,10 +47,13 @@ def __init__(self, store_dir): """ self.last_action = 'init' self.create_key() - self.tmp_file_id = base64.urlsafe_b64encode(os.urandom(32)).strip('=') + + data = base64.urlsafe_b64encode(os.urandom(32)) + self.tmp_file_id = data.decode('utf-8').strip('=') + self.filepath = os.path.join(store_dir, '{}.aes'.format(self.tmp_file_id)) - self.file = open(self.filepath, 'w+b') + self.file = io.open(self.filepath, 'w+b') super(SecureTemporaryFile, self).__init__(self.file, self.filepath) def create_key(self): @@ -58,18 +64,17 @@ def create_key(self): grsecurity-patched kernel it uses (for further details consult https://github.com/freedomofpress/securedrop/pull/477#issuecomment-168445450). """ - self.key = os.urandom(self.AES_key_size / 8) - self.iv = random.getrandbits(self.AES_block_size) + self.key = os.urandom(self.AES_key_size // 8) + self.iv = os.urandom(self.AES_block_size // 8) self.initialize_cipher() def initialize_cipher(self): """Creates the cipher-related objects needed for AES-CTR encryption and decryption. """ - self.ctr_e = Counter.new(self.AES_block_size, initial_value=self.iv) - self.ctr_d = Counter.new(self.AES_block_size, initial_value=self.iv) - self.encryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_e) - self.decryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_d) + self.cipher = Cipher(AES(self.key), CTR(self.iv), default_backend()) + self.encryptor = self.cipher.encryptor() + self.decryptor = self.cipher.decryptor() def write(self, data): """Write `data` to the secure temporary file. This method may be @@ -81,10 +86,10 @@ def write(self, data): raise AssertionError('You cannot write after reading!') self.last_action = 'write' - if isinstance(data, unicode): # noqa + if isinstance(data, str): data = data.encode('utf-8') - self.file.write(self.encryptor.encrypt(data)) + self.file.write(self.encryptor.update(data)) def read(self, count=None): """Read `data` from the secure temporary file. 
This method may @@ -111,9 +116,23 @@ def read(self, count=None): self.last_action = 'read' if count: - return self.decryptor.decrypt(self.file.read(count)) + return self.decryptor.update(self.file.read(count)) else: - return self.decryptor.decrypt(self.file.read()) + return self.decryptor.update(self.file.read()) + + def close(self): + """The __del__ method in tempfile._TemporaryFileWrapper (which + SecureTemporaryFile class inherits from) calls close() when the + temporary file is deleted. + """ + try: + self.decryptor.finalize() + except AlreadyFinalized: + pass + + # Since tempfile._TemporaryFileWrapper.close() does other cleanup, + # (i.e. deleting the temp file on disk), we need to call it also. + super(SecureTemporaryFile, self).close() # python-gnupg will not recognize our SecureTemporaryFile as a stream-like type diff --git a/securedrop/source.py b/securedrop/source.py --- a/securedrop/source.py +++ b/securedrop/source.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import config +from sdconfig import config from source_app import create_app @@ -9,4 +9,4 @@ if __name__ == "__main__": # pragma: no cover debug = getattr(config, 'env', 'prod') != 'prod' - app.run(debug=debug, host='0.0.0.0', port=8080) + app.run(debug=debug, host='0.0.0.0', port=8080) # nosec diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -8,32 +8,73 @@ from os import path from sqlalchemy.orm.exc import NoResultFound -import crypto_util import i18n -import store import template_filters import version -from db import Source, db_session +from crypto_util import CryptoUtil +from db import db +from models import InstanceConfig, Source from request_that_secures_file_uploads import RequestThatSecuresFileUploads from source_app import main, info, api from source_app.decorators import ignore_static from source_app.utils import logged_in +from store import Storage + +import typing +# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking +if typing.TYPE_CHECKING: + # flake8 can not understand type annotation yet. + # That is why all type annotation relative import + # statements has to be marked as noqa. + # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401 + from sdconfig import SDConfig # noqa: F401 def create_app(config): + # type: (SDConfig) -> Flask app = Flask(__name__, template_folder=config.SOURCE_TEMPLATES_DIR, static_folder=path.join(config.SECUREDROP_ROOT, 'static')) app.request_class = RequestThatSecuresFileUploads - app.config.from_object(config.SourceInterfaceFlaskConfig) + app.config.from_object(config.SourceInterfaceFlaskConfig) # type: ignore + app.sdconfig = config # The default CSRF token expiration is 1 hour. Since large uploads can # take longer than an hour over Tor, we increase the valid window to 24h. 
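+ # (Flask-WTF reads WTF_CSRF_TIME_LIMIT in seconds: 60 * 60 * 24 = 86400.)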
app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24 - CSRFProtect(app) + if config.DATABASE_ENGINE == "sqlite": + db_uri = (config.DATABASE_ENGINE + ":///" + + config.DATABASE_FILE) + else: + db_uri = ( + config.DATABASE_ENGINE + '://' + + config.DATABASE_USERNAME + ':' + + config.DATABASE_PASSWORD + '@' + + config.DATABASE_HOST + '/' + + config.DATABASE_NAME + ) + app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + app.config['SQLALCHEMY_DATABASE_URI'] = db_uri + db.init_app(app) + + app.storage = Storage(config.STORE_DIR, + config.TEMP_DIR, + config.JOURNALIST_KEY) + + app.crypto_util = CryptoUtil( + scrypt_params=config.SCRYPT_PARAMS, + scrypt_id_pepper=config.SCRYPT_ID_PEPPER, + scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER, + securedrop_root=config.SECUREDROP_ROOT, + word_list=config.WORD_LIST, + nouns_file=config.NOUNS, + adjectives_file=config.ADJECTIVES, + gpg_key_dir=config.GPG_KEY_DIR, + ) + @app.errorhandler(CSRFError) def handle_csrf_error(e): msg = render_template('session_timeout.html') @@ -50,7 +91,8 @@ def handle_csrf_error(e): app.jinja_env.lstrip_blocks = True app.jinja_env.globals['version'] = version.__version__ if getattr(config, 'CUSTOM_HEADER_IMAGE', None): - app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE + app.jinja_env.globals['header_image'] = \ + config.CUSTOM_HEADER_IMAGE # type: ignore app.jinja_env.globals['use_custom_header_image'] = True else: app.jinja_env.globals['header_image'] = 'logo.png' @@ -62,7 +104,16 @@ def handle_csrf_error(e): app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat for module in [main, info, api]: - app.register_blueprint(module.make_blueprint(config)) + app.register_blueprint(module.make_blueprint(config)) # type: ignore + + @app.before_request + @ignore_static + def setup_i18n(): + """Store i18n-related values in Flask's special g object""" + g.locale = i18n.get_locale(config) + g.text_direction = i18n.get_text_direction(g.locale) + g.html_lang = i18n.locale_to_rfc_5646(g.locale) + g.locales = i18n.get_locale2name() @app.before_request @ignore_static @@ -79,14 +130,15 @@ def check_tor2web(): .format(url=url_for('info.tor2web_warning'))), "banner-warning") + @app.before_request + @ignore_static + def load_instance_config(): + app.instance_config = InstanceConfig.get_current() + @app.before_request @ignore_static def setup_g(): """Store commonly used values in Flask's special g object""" - g.locale = i18n.get_locale(config) - g.text_direction = i18n.get_text_direction(g.locale) - g.html_lang = i18n.locale_to_rfc_5646(g.locale) - g.locales = i18n.get_locale2name() if 'expires' in session and datetime.utcnow() >= session['expires']: msg = render_template('session_timeout.html') @@ -94,7 +146,9 @@ def setup_g(): # clear the session after we render the message so it's localized session.clear() + # Redirect to index with flashed message flash(Markup(msg), "important") + return redirect(url_for('main.index')) session['expires'] = datetime.utcnow() + \ timedelta(minutes=getattr(config, @@ -107,7 +161,7 @@ def setup_g(): # these common values. 
if logged_in(): g.codename = session['codename'] - g.filesystem_id = crypto_util.hash_codename(g.codename) + g.filesystem_id = app.crypto_util.hash_codename(g.codename) try: g.source = Source.query \ .filter(Source.filesystem_id == g.filesystem_id) \ @@ -119,13 +173,7 @@ def setup_g(): del session['logged_in'] del session['codename'] return redirect(url_for('main.index')) - g.loc = store.path(g.filesystem_id) - - @app.teardown_appcontext - def shutdown_session(exception=None): - """Automatically remove database sessions at the end of the request, or - when the application shuts down""" - db_session.remove() + g.loc = app.storage.path(g.filesystem_id) @app.errorhandler(404) def page_not_found(error): diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py --- a/securedrop/source_app/api.py +++ b/securedrop/source_app/api.py @@ -1,6 +1,7 @@ import json +import platform -from flask import Blueprint, make_response +from flask import Blueprint, current_app, make_response import version @@ -10,9 +11,13 @@ def make_blueprint(config): @view.route('/metadata') def metadata(): - meta = {'gpg_fpr': config.JOURNALIST_KEY, - 'sd_version': version.__version__, - } + meta = { + 'allow_document_uploads': current_app.instance_config.allow_document_uploads, + 'gpg_fpr': config.JOURNALIST_KEY, + 'sd_version': version.__version__, + 'server_os': platform.linux_distribution()[1], + 'supported_languages': config.SUPPORTED_LOCALES + } resp = make_response(json.dumps(meta)) resp.headers['Content-Type'] = 'application/json' return resp diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py --- a/securedrop/source_app/forms.py +++ b/securedrop/source_app/forms.py @@ -3,7 +3,7 @@ from wtforms import PasswordField from wtforms.validators import InputRequired, Regexp, Length -from db import Source +from models import Source class LoginForm(FlaskForm): diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py --- a/securedrop/source_app/info.py +++ b/securedrop/source_app/info.py @@ -1,7 +1,7 @@ -from cStringIO import StringIO -from flask import Blueprint, render_template, send_file +# -*- coding: utf-8 -*- +from flask import Blueprint, render_template, send_file, current_app -import crypto_util +from io import BytesIO # noqa def make_blueprint(config): @@ -17,8 +17,10 @@ def recommend_tor_browser(): @view.route('/journalist-key') def download_journalist_pubkey(): - journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY) - return send_file(StringIO(journalist_pubkey), + journalist_pubkey = current_app.crypto_util.gpg.export_keys( + config.JOURNALIST_KEY) + data = BytesIO(journalist_pubkey.encode('utf-8')) + return send_file(data, mimetype="application/pgp-keys", attachment_filename=config.JOURNALIST_KEY + ".asc", as_attachment=True) diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -1,5 +1,6 @@ import operator import os +import io from datetime import datetime from flask import (Blueprint, render_template, flash, redirect, url_for, g, @@ -7,11 +8,10 @@ from flask_babel import gettext from sqlalchemy.exc import IntegrityError -import crypto_util import store -from db import Source, db_session, Submission, Reply, get_one_or_else -from rm import srm +from db import db +from models import Source, Submission, Reply, get_one_or_else from source_app.decorators import login_required from source_app.utils import (logged_in, generate_unique_codename, 
async_genkey, normalize_timestamps, @@ -41,25 +41,41 @@ def generate(): session['new_user'] = True return render_template('generate.html', codename=codename) + @view.route('/org-logo') + def select_logo(): + if os.path.exists(os.path.join(current_app.static_folder, 'i', + 'custom_logo.png')): + return redirect(url_for('static', filename='i/custom_logo.png')) + else: + return redirect(url_for('static', filename='i/logo.png')) + @view.route('/create', methods=['POST']) def create(): - filesystem_id = crypto_util.hash_codename(session['codename']) + filesystem_id = current_app.crypto_util.hash_codename( + session['codename']) - source = Source(filesystem_id, crypto_util.display_id()) - db_session.add(source) + source = Source(filesystem_id, current_app.crypto_util.display_id()) + db.session.add(source) try: - db_session.commit() + db.session.commit() except IntegrityError as e: - db_session.rollback() + db.session.rollback() current_app.logger.error( "Attempt to create a source with duplicate codename: %s" % (e,)) # Issue 2386: don't log in on duplicates del session['codename'] + + # Issue 4361: Delete 'logged_in' if it's in the session + try: + del session['logged_in'] + except KeyError: + pass + abort(500) else: - os.mkdir(store.path(filesystem_id)) + os.mkdir(current_app.storage.path(filesystem_id)) session['logged_in'] = True return redirect(url_for('.lookup')) @@ -68,12 +84,19 @@ def create(): @login_required def lookup(): replies = [] - for reply in g.source.replies: - reply_path = store.path(g.filesystem_id, reply.filename) + source_inbox = Reply.query.filter(Reply.source_id == g.source.id) \ + .filter(Reply.deleted_by_source == False).all() # noqa + + for reply in source_inbox: + reply_path = current_app.storage.path( + g.filesystem_id, + reply.filename, + ) try: - reply.decrypted = crypto_util.decrypt( - g.codename, - open(reply_path).read()).decode('utf-8') + with io.open(reply_path, "rb") as f: + contents = f.read() + reply_obj = current_app.crypto_util.decrypt(g.codename, contents) + reply.decrypted = reply_obj except UnicodeDecodeError: current_app.logger.error("Could not decode reply %s" % reply.filename) @@ -88,29 +111,41 @@ def lookup(): # Generate a keypair to encrypt replies from the journalist # Only do this if the journalist has flagged the source as one # that they would like to reply to. (Issue #140.) - if not crypto_util.getkey(g.filesystem_id) and g.source.flagged: - async_genkey(g.filesystem_id, g.codename) + if not current_app.crypto_util.getkey(g.filesystem_id) and \ + g.source.flagged: + db_uri = current_app.config['SQLALCHEMY_DATABASE_URI'] + async_genkey(current_app.crypto_util, + db_uri, + g.filesystem_id, + g.codename) return render_template( 'lookup.html', + allow_document_uploads=current_app.instance_config.allow_document_uploads, codename=g.codename, replies=replies, flagged=g.source.flagged, new_user=session.get('new_user', None), - haskey=crypto_util.getkey( + haskey=current_app.crypto_util.getkey( g.filesystem_id)) @view.route('/submit', methods=('POST',)) @login_required def submit(): + allow_document_uploads = current_app.instance_config.allow_document_uploads msg = request.form['msg'] - fh = request.files['fh'] + fh = None + if allow_document_uploads and 'fh' in request.files: + fh = request.files['fh'] # Don't submit anything if it was an "empty" submission. 
#878 if not (msg or fh): - flash(gettext( - "You must enter a message or choose a file to submit."), - "error") + if allow_document_uploads: + flash(gettext( + "You must enter a message or choose a file to submit."), + "error") + else: + flash(gettext("You must enter a message."), "error") return redirect(url_for('main.lookup')) fnames = [] @@ -120,7 +155,7 @@ def submit(): if msg: g.source.interaction_count += 1 fnames.append( - store.save_message_submission( + current_app.storage.save_message_submission( g.filesystem_id, g.source.interaction_count, journalist_filename, @@ -128,7 +163,7 @@ def submit(): if fh: g.source.interaction_count += 1 fnames.append( - store.save_file_submission( + current_app.storage.save_file_submission( g.filesystem_id, g.source.interaction_count, journalist_filename, @@ -152,9 +187,11 @@ def submit(): html_contents=html_contents) flash(Markup(msg), "success") + new_submissions = [] for fname in fnames: submission = Submission(g.source, fname) - db_session.add(submission) + db.session.add(submission) + new_submissions.append(submission) if g.source.pending: g.source.pending = False @@ -163,7 +200,12 @@ def submit(): # (gpg reads 300 bytes from /dev/random) entropy_avail = get_entropy_estimate() if entropy_avail >= 2400: - async_genkey(g.filesystem_id, g.codename) + db_uri = current_app.config['SQLALCHEMY_DATABASE_URI'] + + async_genkey(current_app.crypto_util, + db_uri, + g.filesystem_id, + g.codename) current_app.logger.info("generating key, entropy: {}".format( entropy_avail)) else: @@ -172,7 +214,11 @@ def submit(): entropy_avail)) g.source.last_updated = datetime.utcnow() - db_session.commit() + db.session.commit() + + for sub in new_submissions: + store.async_add_checksum_for_file(sub) + normalize_timestamps(g.filesystem_id) return redirect(url_for('main.lookup')) @@ -180,12 +226,18 @@ def submit(): @view.route('/delete', methods=('POST',)) @login_required def delete(): - query = Reply.query.filter( - Reply.filename == request.form['reply_filename']) + """This deletes the reply from the source's inbox, but preserves + the history for journalists such that they can view conversation + history. 
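+ Only the deleted_by_source flag is set below; the encrypted reply
+ file itself remains on disk for the journalist's view.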
+ """ + + query = Reply.query.filter_by( + filename=request.form['reply_filename'], + source_id=g.source.id) reply = get_one_or_else(query, current_app.logger, abort) - srm(store.path(g.filesystem_id, reply.filename)) - db_session.delete(reply) - db_session.commit() + reply.deleted_by_source = True + db.session.add(reply) + db.session.commit() flash(gettext("Reply deleted"), "notification") return redirect(url_for('.lookup')) @@ -193,16 +245,17 @@ def delete(): @view.route('/delete-all', methods=('POST',)) @login_required def batch_delete(): - replies = g.source.replies + replies = Reply.query.filter(Reply.source_id == g.source.id) \ + .filter(Reply.deleted_by_source == False).all() # noqa if len(replies) == 0: current_app.logger.error("Found no replies when at least one was " "expected") return redirect(url_for('.lookup')) for reply in replies: - srm(store.path(g.filesystem_id, reply.filename)) - db_session.delete(reply) - db_session.commit() + reply.deleted_by_source = True + db.session.add(reply) + db.session.commit() flash(gettext("All replies have been deleted"), "notification") return redirect(url_for('.lookup')) diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py --- a/securedrop/source_app/utils.py +++ b/securedrop/source_app/utils.py @@ -1,15 +1,17 @@ +import io import logging import subprocess from datetime import datetime from flask import session, current_app, abort, g +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker from threading import Thread -import crypto_util import i18n -import store -from db import Source, db_session +from crypto_util import CryptoException +from models import Source def logged_in(): @@ -18,8 +20,8 @@ def logged_in(): def valid_codename(codename): try: - filesystem_id = crypto_util.hash_codename(codename) - except crypto_util.CryptoException as e: + filesystem_id = current_app.crypto_util.hash_codename(codename) + except CryptoException as e: current_app.logger.info( "Could not compute filesystem ID for codename '{}': {}".format( codename, e)) @@ -32,8 +34,9 @@ def valid_codename(codename): def generate_unique_codename(config): """Generate random codenames until we get an unused one""" while True: - codename = crypto_util.genrandomid(Source.NUM_WORDS, - i18n.get_language(config)) + codename = current_app.crypto_util.genrandomid( + Source.NUM_WORDS, + i18n.get_language(config)) # The maximum length of a word in the wordlist is 9 letters and the # codename length is 7 words, so it is currently impossible to @@ -48,7 +51,9 @@ def generate_unique_codename(config): "(Codename='{}')".format(codename)) continue - filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow) + # scrypt (slow) + filesystem_id = current_app.crypto_util.hash_codename(codename) + matching_sources = Source.query.filter( Source.filesystem_id == filesystem_id).all() if len(matching_sources) == 0: @@ -56,32 +61,38 @@ def generate_unique_codename(config): def get_entropy_estimate(): - return int(open('/proc/sys/kernel/random/entropy_avail').read()) + with io.open('/proc/sys/kernel/random/entropy_avail') as f: + return int(f.read()) -def async(f): +def asynchronous(f): def wrapper(*args, **kwargs): thread = Thread(target=f, args=args, kwargs=kwargs) thread.start() return wrapper -@async -def async_genkey(filesystem_id, codename): - crypto_util.genkeypair(filesystem_id, codename) +@asynchronous +def async_genkey(crypto_util_, db_uri, filesystem_id, codename): + # We pass in the `crypto_util_` so we don't have to reference 
`current_app`
+ # here. The app might not have a pushed context during testing which would
+ # cause this asynchronous function to break.
+ crypto_util_.genkeypair(filesystem_id, codename)
# Register key generation as update to the source, so sources will
# filter to the top of the list in the journalist interface if a
# flagged source logs in and has a key generated for them. #789
+ session = sessionmaker(bind=create_engine(db_uri))()
try:
- source = Source.query.filter(Source.filesystem_id == filesystem_id) \
- .one()
+ source = session.query(Source).filter(
+ Source.filesystem_id == filesystem_id).one()
source.last_updated = datetime.utcnow()
- db_session.commit()
+ session.commit()
except Exception as e:
logging.getLogger(__name__).error(
"async_genkey for source (filesystem_id={}): {}"
.format(filesystem_id, e))
+ session.close()
def normalize_timestamps(filesystem_id):
@@ -90,7 +101,7 @@ def normalize_timestamps(filesystem_id):
the latest submission. This minimizes metadata that could be useful to
investigators. See #301.
"""
- sub_paths = [store.path(filesystem_id, submission.filename)
+ sub_paths = [current_app.storage.path(filesystem_id, submission.filename)
for submission in g.source.submissions]
if len(sub_paths) > 1:
args = ["touch"]
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -1,161 +1,396 @@
# -*- coding: utf-8 -*-
+import binascii
+import gzip
import os
import re
-import config
-import zipfile
-import crypto_util
import tempfile
-import gzip
+import zipfile
+
+from flask import current_app
+from hashlib import sha256
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
from werkzeug.utils import secure_filename
from secure_tempfile import SecureTemporaryFile
-import logging
-log = logging.getLogger(__name__)
+import rm
+from worker import create_queue
+
+
+import typing
+
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+ from typing import List, Type, Union # noqa: F401
+ from tempfile import _TemporaryFileWrapper # type: ignore # noqa: F401
+ from io import BufferedIOBase # noqa: F401
+ from sqlalchemy.orm import Session # noqa: F401
+ from models import Reply, Submission # noqa: F401
+
VALIDATE_FILENAME = re.compile(
- "^(?P<index>\d+)\-[a-z0-9-_]*"
- "(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match
+ r"^(?P<index>\d+)\-[a-z0-9-_]*(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$"
+).match
class PathException(Exception):
- """An exception raised by `util.verify` when it encounters a bad path.
A path can be bad when it is not absolute or not normalized.
"""
pass
-def verify(p):
- """Assert that the path is absolute, normalized, inside `config.STORE_DIR`, and
- matches the filename format.
+class TooManyFilesException(Exception):
+ """An exception raised by path_without_filesystem_id when too many
+ files have been found for a given submission or reply.
+ This could be due to a very unlikely collision between
+ journalist_designations.
+ """
+ pass
+
+
+class NoFileFoundException(Exception):
+ """An exception raised by path_without_filesystem_id when a file could
+ not be found for a given submission or reply.
+ This is likely due to an admin manually deleting files from the server.
+ """ + pass + + +class NotEncrypted(Exception): + """An exception raised if a file expected to be encrypted client-side + is actually plaintext. """ - if not os.path.isabs(config.STORE_DIR): - raise PathException("config.STORE_DIR(%s) is not absolute" % ( - config.STORE_DIR, )) - - # os.path.abspath makes the path absolute and normalizes - # '/foo/../bar' to '/bar', etc. We have to check that the path is - # normalized before checking that it starts with the - # `config.STORE_DIR` or else a malicious actor could append a - # bunch of '../../..' to access files outside of the store. - if not p == os.path.abspath(p): - raise PathException("The path is not absolute and/or normalized") - - # Check that the path p is in config.STORE_DIR - if os.path.relpath(p, config.STORE_DIR).startswith('..'): - raise PathException("Invalid directory %s" % (p, )) - - if os.path.isfile(p): - filename = os.path.basename(p) - ext = os.path.splitext(filename)[-1] - if filename == '_FLAG': - return True - if ext != '.gpg': - # if there's an extension, verify it's a GPG - raise PathException("Invalid file extension %s" % (ext, )) - if not VALIDATE_FILENAME(filename): - raise PathException("Invalid filename %s" % (filename, )) - - -def path(*s): - """Get the normalized, absolute file path, within `config.STORE_DIR`.""" - joined = os.path.join(os.path.abspath(config.STORE_DIR), *s) - absolute = os.path.abspath(joined) - verify(absolute) - return absolute - - -def get_bulk_archive(selected_submissions, zip_directory=''): - """Generate a zip file from the selected submissions""" - zip_file = tempfile.NamedTemporaryFile(prefix='tmp_securedrop_bulk_dl_', - dir=config.TEMP_DIR, - delete=False) - sources = set([i.source.journalist_designation - for i in selected_submissions]) - # The below nested for-loops are there to create a more usable - # folder structure per #383 - with zipfile.ZipFile(zip_file, 'w') as zip: - for source in sources: - fname = "" - submissions = [s for s in selected_submissions - if s.source.journalist_designation == source] - for submission in submissions: - filename = path(submission.source.filesystem_id, - submission.filename) - verify(filename) - document_number = submission.filename.split('-')[0] - if zip_directory == submission.source.journalist_filename: - fname = zip_directory - else: - fname = os.path.join(zip_directory, source) - zip.write(filename, arcname=os.path.join( - fname, - "%s_%s" % (document_number, - submission.source.last_updated.date()), - os.path.basename(filename) - )) - return zip_file - - -def save_file_submission(filesystem_id, count, journalist_filename, filename, - stream): - sanitized_filename = secure_filename(filename) - - # We store file submissions in a .gz file for two reasons: - # - # 1. Downloading large files over Tor is very slow. If we can - # compress the file, we can speed up future downloads. - # - # 2. We want to record the original filename because it might be - # useful, either for context about the content of the submission - # or for figuring out which application should be used to open - # it. However, we'd like to encrypt that info and have the - # decrypted file automatically have the name of the original - # file. Given various usability constraints in GPG and Tails, this - # is the most user-friendly way we have found to do this. 
- - encrypted_file_name = "{0}-{1}-doc.gz.gpg".format( - count, - journalist_filename) - encrypted_file_path = path(filesystem_id, encrypted_file_name) - with SecureTemporaryFile("/tmp") as stf: - with gzip.GzipFile(filename=sanitized_filename, - mode='wb', fileobj=stf) as gzf: - # Buffer the stream into the gzip file to avoid excessive - # memory consumption - while True: - buf = stream.read(1024 * 8) - if not buf: - break - gzf.write(buf) - - crypto_util.encrypt(stf, config.JOURNALIST_KEY, encrypted_file_path) - - return encrypted_file_name - - -def save_message_submission(filesystem_id, count, journalist_filename, - message): - filename = "{0}-{1}-msg.gpg".format(count, journalist_filename) - msg_loc = path(filesystem_id, filename) - crypto_util.encrypt(message, config.JOURNALIST_KEY, msg_loc) - return filename - - -def rename_submission(filesystem_id, orig_filename, journalist_filename): - check_submission_name = VALIDATE_FILENAME(orig_filename) - if check_submission_name: - parsed_filename = check_submission_name.groupdict() - if parsed_filename.get('file_type'): - new_filename = "{}-{}-{}.gpg".format( - parsed_filename['index'], journalist_filename, - parsed_filename['file_type']) - try: - os.rename(path(filesystem_id, orig_filename), - path(filesystem_id, new_filename)) - except OSError: - pass - else: - return new_filename # Only return new filename if successful - return orig_filename + pass + + +def safe_renames(old, new): + """safe_renames(old, new) + + This is a modified version of Python's os.renames that does not + prune directories. + Super-rename; create directories as necessary without deleting any + left empty. Works like rename, except creation of any intermediate + directories needed to make the new pathname good is attempted + first. + Note: this function can fail with the new directory structure made + if you lack permissions needed to unlink the leaf directory or + file. + """ + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + os.rename(old, new) + + +class Storage: + + def __init__(self, storage_path, temp_dir, gpg_key): + # type: (str, str, str) -> None + if not os.path.isabs(storage_path): + raise PathException("storage_path {} is not absolute".format( + storage_path)) + self.__storage_path = storage_path + + if not os.path.isabs(temp_dir): + raise PathException("temp_dir {} is not absolute".format( + temp_dir)) + self.__temp_dir = temp_dir + + self.__gpg_key = gpg_key + + # where files and directories are sent to be securely deleted + self.__shredder_path = os.path.abspath(os.path.join(self.__storage_path, "../shredder")) + if not os.path.exists(self.__shredder_path): + os.makedirs(self.__shredder_path, mode=0o700) + + @property + def storage_path(self): + return self.__storage_path + + @property + def shredder_path(self): + return self.__shredder_path + + def shredder_contains(self, path: str) -> bool: + """ + Returns True if the fully-resolved path lies within the shredder. + """ + common_path = os.path.commonpath((os.path.realpath(path), self.__shredder_path)) + return common_path == self.__shredder_path + + def store_contains(self, path: str) -> bool: + """ + Returns True if the fully-resolved path lies within the store. + """ + common_path = os.path.commonpath((os.path.realpath(path), self.__storage_path)) + return common_path == self.__storage_path + + def verify(self, p: str) -> bool: + """ + Verify that a given path is valid for the store. 
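+ A path is considered valid when it resolves inside the store; a path
+ that does not exist yet is accepted, while an existing path must be a
+ directory or a plain file whose name matches VALIDATE_FILENAME.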
+ """ + + if self.store_contains(p): + # verifying a hypothetical path + if not os.path.exists(p): + return True + + # extant paths must be directories or correctly-named plain files + if os.path.isdir(p): + return True + + if os.path.isfile(p) and VALIDATE_FILENAME(os.path.basename(p)): + return True + + raise PathException("Path not valid in store: {}".format(p)) + + def path(self, filesystem_id: str, filename: str = '') -> str: + """ + Returns the path resolved within `self.__storage_path`. + + Raises PathException if `verify` doesn't like the path. + """ + joined = os.path.join(os.path.realpath(self.__storage_path), filesystem_id, filename) + absolute = os.path.realpath(joined) + if not self.verify(absolute): + raise PathException( + """Could not resolve ("{}", "{}") to a path within the store.""".format( + filesystem_id, filename + ) + ) + return absolute + + def path_without_filesystem_id(self, filename): + # type: (str) -> str + """Get the normalized, absolute file path, within + `self.__storage_path` for a filename when the filesystem_id + is not known. + """ + + joined_paths = [] + for rootdir, _, files in os.walk(os.path.realpath(self.__storage_path)): + for file_ in files: + if file_ in filename: + joined_paths.append(os.path.join(rootdir, file_)) + + if len(joined_paths) > 1: + raise TooManyFilesException('Found duplicate files!') + elif len(joined_paths) == 0: + raise NoFileFoundException('File not found: {}'.format(filename)) + else: + absolute = joined_paths[0] + + if not self.verify(absolute): + raise PathException( + """Could not resolve "{}" to a path within the store.""".format(filename) + ) + return absolute + + def get_bulk_archive(self, selected_submissions, zip_directory=''): + # type: (List, str) -> _TemporaryFileWrapper + """Generate a zip file from the selected submissions""" + zip_file = tempfile.NamedTemporaryFile( + prefix='tmp_securedrop_bulk_dl_', + dir=self.__temp_dir, + delete=False) + sources = set([i.source.journalist_designation + for i in selected_submissions]) + # The below nested for-loops are there to create a more usable + # folder structure per #383 + with zipfile.ZipFile(zip_file, 'w') as zip: + for source in sources: + fname = "" + submissions = [s for s in selected_submissions + if s.source.journalist_designation == source] + for submission in submissions: + filename = self.path(submission.source.filesystem_id, submission.filename) + document_number = submission.filename.split('-')[0] + if zip_directory == submission.source.journalist_filename: + fname = zip_directory + else: + fname = os.path.join(zip_directory, source) + zip.write(filename, arcname=os.path.join( + fname, + "%s_%s" % (document_number, + submission.source.last_updated.date()), + os.path.basename(filename) + )) + return zip_file + + def move_to_shredder(self, path: str): + """ + Moves content from the store to the shredder for secure deletion. + + Python's safe_renames (and the underlying rename(2) calls) will + silently overwrite content, which could bypass secure + deletion, so we create a temporary directory under the + shredder directory and move the specified content there. + + This function is intended to be atomic and quick, for use in + deletions via the UI and API. The actual secure deletion is + performed by an asynchronous process that monitors the + shredder directory. 
+ """ + if not self.verify(path): + raise ValueError( + """Path is not within the store: "{}" """.format(path) + ) + + if not os.path.exists(path): + raise ValueError( + """Path does not exist: "{}" """.format(path) + ) + + relpath = os.path.relpath(path, start=self.storage_path) + dest = os.path.join(tempfile.mkdtemp(dir=self.__shredder_path), relpath) + current_app.logger.info("Moving {} to shredder: {}".format(path, dest)) + safe_renames(path, dest) + + def clear_shredder(self): + current_app.logger.info("Clearing shredder") + directories = [] + targets = [] + for directory, subdirs, files in os.walk(self.shredder_path): + for subdir in subdirs: + real_subdir = os.path.realpath(os.path.join(directory, subdir)) + if self.shredder_contains(real_subdir): + directories.append(real_subdir) + for f in files: + abs_file = os.path.abspath(os.path.join(directory, f)) + if os.path.islink(abs_file): + # Somehow, a symbolic link was created in the + # store. This shouldn't happen in normal + # operations. Just remove the link; don't try to + # shred its target. Note that we only have special + # handling for symlinks. Hard links -- which + # again, shouldn't occur in the store -- will + # result in the file data being shredded once for + # each link. + current_app.logger.info( + "Deleting link {} to {}".format( + abs_file, os.readlink(abs_file) + ) + ) + os.unlink(abs_file) + continue + if self.shredder_contains(abs_file): + targets.append(abs_file) + + target_count = len(targets) + current_app.logger.info("Files to delete: {}".format(target_count)) + for i, t in enumerate(targets, 1): + current_app.logger.info("Securely deleting file {}/{}: {}".format(i, target_count, t)) + rm.secure_delete(t) + current_app.logger.info("Securely deleted file {}/{}: {}".format(i, target_count, t)) + + directories_to_remove = set(directories) + dir_count = len(directories_to_remove) + for i, d in enumerate(reversed(sorted(directories_to_remove)), 1): + current_app.logger.debug("Removing directory {}/{}: {}".format(i, dir_count, d)) + os.rmdir(d) + current_app.logger.debug("Removed directory {}/{}: {}".format(i, dir_count, d)) + + def save_file_submission(self, filesystem_id, count, journalist_filename, + filename, stream): + # type: (str, int, str, str, BufferedIOBase) -> str + sanitized_filename = secure_filename(filename) + + # We store file submissions in a .gz file for two reasons: + # + # 1. Downloading large files over Tor is very slow. If we can + # compress the file, we can speed up future downloads. + # + # 2. We want to record the original filename because it might be + # useful, either for context about the content of the submission + # or for figuring out which application should be used to open + # it. However, we'd like to encrypt that info and have the + # decrypted file automatically have the name of the original + # file. Given various usability constraints in GPG and Tails, this + # is the most user-friendly way we have found to do this. 
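+        #
+        # Note: mtime=0 is passed to GzipFile below so that the gzip
+        # header carries no timestamp, keeping the compressed output
+        # deterministic and avoiding a metadata leak about when the
+        # submission was stored.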
+ + encrypted_file_name = "{0}-{1}-doc.gz.gpg".format( + count, + journalist_filename) + encrypted_file_path = self.path(filesystem_id, encrypted_file_name) + with SecureTemporaryFile("/tmp") as stf: # nosec + with gzip.GzipFile(filename=sanitized_filename, + mode='wb', fileobj=stf, mtime=0) as gzf: + # Buffer the stream into the gzip file to avoid excessive + # memory consumption + while True: + buf = stream.read(1024 * 8) + if not buf: + break + gzf.write(buf) + + current_app.crypto_util.encrypt( + stf, self.__gpg_key, encrypted_file_path) + + return encrypted_file_name + + def save_pre_encrypted_reply(self, filesystem_id, count, + journalist_filename, content): + # type: (str, int, str, str) -> str + if '-----BEGIN PGP MESSAGE-----' not in content.split('\n')[0]: + raise NotEncrypted + + encrypted_file_name = "{0}-{1}-reply.gpg".format(count, + journalist_filename) + encrypted_file_path = self.path(filesystem_id, encrypted_file_name) + + with open(encrypted_file_path, 'w') as fh: + fh.write(content) + + return encrypted_file_path + + def save_message_submission(self, filesystem_id, count, + journalist_filename, message): + # type: (str, int, str, str) -> str + filename = "{0}-{1}-msg.gpg".format(count, journalist_filename) + msg_loc = self.path(filesystem_id, filename) + current_app.crypto_util.encrypt(message, self.__gpg_key, msg_loc) + return filename + + +def async_add_checksum_for_file(db_obj): + # type: (Union[Submission, Reply]) -> str + return create_queue().enqueue( + queued_add_checksum_for_file, + type(db_obj), + db_obj.id, + current_app.storage.path(db_obj.source.filesystem_id, db_obj.filename), + current_app.config['SQLALCHEMY_DATABASE_URI'], + ) + + +def queued_add_checksum_for_file(db_model, model_id, file_path, db_uri): + # type: (Union[Type[Submission], Type[Reply]], int, str, str) -> str + # we have to create our own DB session because there is no app context + session = sessionmaker(bind=create_engine(db_uri))() + db_obj = session.query(db_model).filter_by(id=model_id).one() + add_checksum_for_file(session, db_obj, file_path) + # We need to return a non-`None` value so the rq worker writes this back to Redis + return "success" + + +def add_checksum_for_file(session, db_obj, file_path): + # type: (Session, Union[Submission, Reply], str) -> None + hasher = sha256() + with open(file_path, 'rb') as f: + while True: + read_bytes = f.read(4096) + if not read_bytes: + break + hasher.update(read_bytes) + + digest = binascii.hexlify(hasher.digest()).decode('utf-8') + digest_str = u'sha256:' + digest + db_obj.checksum = digest_str + + session.add(db_obj) + session.commit() diff --git a/securedrop/template_filters.py b/securedrop/template_filters.py --- a/securedrop/template_filters.py +++ b/securedrop/template_filters.py @@ -18,7 +18,7 @@ def rel_datetime_format(dt, fmt=None, relative=False): def nl2br(context, value): - formatted = u'<br>\n'.join(escape(value).split('\n')) + formatted = '<br>\n'.join(escape(value).split('\n')) if context.autoescape: formatted = Markup(formatted) return formatted diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '0.5.2' +__version__ = '1.2.2' diff --git a/securedrop/worker.py b/securedrop/worker.py --- a/securedrop/worker.py +++ b/securedrop/worker.py @@ -1,14 +1,127 @@ +import logging import os +from typing import Optional, List from redis import Redis -from rq import Queue +from rq.queue import Queue +from rq.worker import Worker, WorkerStatus +from 
rq.exceptions import InvalidJobOperation, NoSuchJobError +from rq.registry import StartedJobRegistry -queue_name = 'test' if os.environ.get( - 'SECUREDROP_ENV') == 'test' else 'default' +from sdconfig import config -# `srm` can take a long time on large files, so allow it run for up to an hour -q = Queue(name=queue_name, connection=Redis(), default_timeout=3600) +def create_queue(name=None, timeout=3600): + # type: (str, int) -> Queue + """ + Create an rq ``Queue`` named ``name`` with default timeout ``timeout``. -def enqueue(*args, **kwargs): - return q.enqueue(*args, **kwargs) + If ``name`` is omitted, ``config.RQ_WORKER_NAME`` is used. + """ + if name is None: + name = config.RQ_WORKER_NAME + q = Queue(name=name, connection=Redis(), default_timeout=timeout) + return q + + +def rq_workers(queue=None): + # type: (Queue) -> List[Worker] + """ + Returns the list of current rq ``Worker``s. + """ + + return Worker.all(connection=Redis(), queue=queue) + + +def worker_for_job(job_id): + # type: (str) -> Optional[Worker] + """ + If the job is being run, return its ``Worker``. + """ + for worker in rq_workers(): + # If the worker process no longer exists, skip it. From "man 2 + # kill": "If sig is 0, then no signal is sent, but existence + # and permission checks are still performed; this can be used + # to check for the existence of a process ID or process group + # ID that the caller is permitted to signal." + try: + os.kill(worker.pid, 0) + except OSError: + continue + + # If it's running and working on the given job, return it. + if worker.state == WorkerStatus.BUSY and job_id == worker.get_current_job_id(): + return worker + return None + + +def requeue_interrupted_jobs(queue_name=None): + # type: (str) -> None + """ + Requeues jobs found in the given queue's started job registry. + + Only restarts those that aren't already queued or being run. + + When rq starts a job, it records it in the queue's started job + registry. If the server is rebooted before the job completes, the + job is not automatically restarted from the information in the + registry. For tasks like secure deletion of files, this means that + information thought to be deleted is still present in the case of + seizure or compromise. We have manage.py tasks to clean such files + up, but this utility attempts to reduce the need for manual + intervention by automatically resuming interrupted jobs. + + This function is predicated on a risky assumption: that all jobs + are idempotent. At time of writing, we use rq for securely + deleting submission files and hashing submissions for the ETag + header. Both of these can be safely repeated. If we add rq tasks + that cannot, this function should be improved to omit those. 
+ """ + queue = create_queue(queue_name) + started_job_registry = StartedJobRegistry(queue=queue) + + queued_job_ids = queue.get_job_ids() + logging.debug("queued jobs: {}".format(queued_job_ids)) + started_job_ids = started_job_registry.get_job_ids() + logging.debug("started jobs: {}".format(started_job_ids)) + job_ids = [j for j in started_job_ids if j not in queued_job_ids] + logging.debug("candidate job ids: {}".format(job_ids)) + + if not job_ids: + logging.info("No interrupted jobs found in started job registry.") + + for job_id in job_ids: + logging.debug("Considering job %s", job_id) + try: + job = started_job_registry.job_class.fetch(job_id, started_job_registry.connection) + except NoSuchJobError as e: + logging.error( + "Could not find details for job %s: %s", job_id, e + ) + continue + + logging.debug( + "Job %s enqueued at %s, started at %s", job_id, job.enqueued_at, job.started_at + ) + + worker = worker_for_job(job_id) + if worker: + logging.info( + "Skipping job %s, which is already being run by worker %s", job_id, worker.key + ) + continue + + logging.info("Requeuing job %s", job) + + try: + started_job_registry.remove(job) + except InvalidJobOperation as e: + logging.error("Could not remove job %s from started job registry: %s", job, e) + continue + + try: + queue.enqueue_job(job) + logging.debug("Job now enqueued at %s, started at %s", job.enqueued_at, job.started_at) + except Exception as e: + logging.error("Could not requeue job %s: %s", job, e) + continue diff --git a/setup.py b/setup.py new file mode 100644 --- /dev/null +++ b/setup.py @@ -0,0 +1,24 @@ +import setuptools + +long_description = "The SecureDrop whistleblower platform." + +setuptools.setup( + name="securedrop-app-code", + version="1.2.2", + author="Freedom of the Press Foundation", + author_email="[email protected]", + description="SecureDrop Server", + long_description=long_description, + long_description_content_type="text/markdown", + license="GPLv3+", + python_requires=">=3.5", + url="https://github.com/freedomofpress/securedrop", + classifiers=( + "Development Status :: 5 - Stable", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + ), +)
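The worker rewrite above replaces the single module-level queue with `create_queue` and adds `requeue_interrupted_jobs` for resuming jobs lost to a reboot. A minimal sketch of how these helpers might be wired together at startup, assuming Redis is running and `sdconfig` is importable; the standalone entry point shown here is an illustration, not part of the patch:

import logging

from worker import create_queue, requeue_interrupted_jobs


def resume_and_enqueue():
    logging.basicConfig(level=logging.INFO)

    # First, requeue anything stranded in the started-job registry:
    # jobs that were running when the server rebooted and are neither
    # queued nor held by a live worker. This is safe only because the
    # queued tasks (secure deletion, checksumming) are idempotent.
    requeue_interrupted_jobs()

    # New work is then enqueued through the same queue the workers
    # listen on (config.RQ_WORKER_NAME by default).
    queue = create_queue()
    # queue.enqueue(some_idempotent_task, ...)  # hypothetical task


if __name__ == "__main__":
    resume_and_enqueue()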
diff --git a/admin/tests/files/SecureDrop.asc b/admin/tests/files/SecureDrop.asc new file mode 100644 --- /dev/null +++ b/admin/tests/files/SecureDrop.asc @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v2.0.19 (GNU/Linux) + +mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t +wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU ++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM +KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB +0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg +u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2 +ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B +5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf +CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7 +hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf +yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB +tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP +RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS +m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r +FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/ +JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r +q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn +5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg +b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi +AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW +iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3 +cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai +R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj +EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj +LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6 ++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC +vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq +95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf +T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls +9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC +KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p +N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21 +OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h +h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls +Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f +FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA +6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3 +inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA +KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn +IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f +4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK +n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ +BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF +z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI +k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS +iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ +Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8 +=XVz8 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/admin/tests/files/ca.crt b/admin/tests/files/ca.crt new file mode 100644 --- /dev/null +++ b/admin/tests/files/ca.crt @@ -0,0 +1 @@ +TEST FILE ONLY diff --git 
a/admin/tests/files/corrupted b/admin/tests/files/corrupted new file mode 100644 --- /dev/null +++ b/admin/tests/files/corrupted @@ -0,0 +1,2 @@ +app_hostname: app +app_ip: [10.20.2.2 diff --git a/admin/tests/files/key.asc b/admin/tests/files/key.asc new file mode 100644 --- /dev/null +++ b/admin/tests/files/key.asc @@ -0,0 +1,281 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFQAYqYBEAChvZWXGBoliCiePTZ93B0ScCspod2DCGMRA2AiUExnf0IxqOOq +aBnc7l1jUdwRent+uty3483sHUcjbBjE4BHCGRfQWH+YZjiIHGyOyhUR5x3Uvslk +b6SPrQv4EKcev+Wcr9hOnahBLf+EjsJc3xTK3Kk7Cju0RBHPPtQ2E2j/EUTwAHHk +EoN9VEqJaCdqLHewIHcCkTMJUvulVbABUYcDGsPXAfb2pZ0e+ga13KdrOZgR5sdH +nj+tcEYpC1y01PL3HtQdh1VgK1iJglM3HdzIMY7omAp4GeznNIyL8ZW33mZsvTU5 +DWDhYnCbZlWqIQgBC64Q6iwDOI6POuOief9LXlRz5VL5PsRWZc/R7LQ5iThA8I05 +jxXkGB4NRi9/Z2CO7ROhZVAU1WeahQcISSrgk+1UGzpxdDZulX0JwpIPcIqDC234 +gImGiWx6N8iqOIAT+gOKLwThe7yzm0wbZ8rpNTZBPen3ygHA94YM1VpL3Gk/K5/g +PqUm0/eYkK6vP/ZRDdzbR+WxN3GZi/sF+CHkJSCIEm9GfjXRmKS0h6IHzqvbjH5h +Dh1jPRy0D5dyOQoYtw3gCiv5adAdZEyBECs24v5+RujgK2Q+L5l88lI9nC8VkuBe +VDK3JjZmwVLdpSQ3UCVCux59qe/8F8sowXUHL7Q83n9/9SVdvUfQqVOD9wARAQAB +tCJMb2ljIERhY2hhcnkgKE91b1UpIDxsb2ljQGdudS5vcmc+iQI3BBMBCAAhBQJU +AGa2AhsDBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEJktI7OS+eTyOZgP/2CW +EXXPGNhokLKdGprr1/dGpgVoaWyc9zcPbjOhrKOQfJ5JLzVN5vxMj3DrvjriFA7g +6ntcFpAc8xcvmgp1FfiUve5Wrp9EOtIstuyS4yOpEZ5c4e5nByChT8s1CivCEd/R +UBIrehGH0e5aURss1F0VQP+u2gI/Zq3B/SpkG48qtgnZloZqlXtTEZQZcEPyzF39 +Hb5ncEELgQI6W6ldamQ6EcMzJUlLMxzuztFhLKQ+DS358IDohkh2rgJN6vuvSMJh +J57YNBDz3A6MLXey03Vm5NWoZJzp4MusecZELTsYkRMJXPPeET7u55YbQYBFtLFX +tB2pqpIPCWK/4LQ6vscGaf+qgvc1CXBCUmpXndSgefZXM6BNxSG5wr7P4j6sNlsI +4p8Y3CnRgJgMP78/pSuOmVA0RKGzErYHZsRnfudt6irPC75Bk3m5k8d22x3ZM2EN +bcxnjkb7esc/pLVFeFt1kvk5x6YN+0ZOa0phR5xg1wQ3uBNgpvRB/EmluWBwbQoe +ScHAgHeuUB3dKvUcSFX0S5YrJ9kzc5LA8WNqhEfjOQpQ7FDvS/cvNqd5u4bE2NL3 +9aFjjB5S+KwNHf9uMky9Rt65pV585JK5MmpQXp0m6sMJAMTtRh/BAm2xpNqXi8bL +aATCj81s36BXdAA67l3IuXIO05PKiLtjJDQg+SsDiEYEEBEIAAYFAlQAZvkACgkQ +8dLMyEl6F22/8wCeOdprYHGpSrQgENJnqlflBD3jeskAniWg5pOraywAzXGNN3bD +uF9TOY9siQEcBBABAgAGBQJUBI1NAAoJEPnq46ZYGdfo8kgH/2VGu1C4eopujw8T +TRF3d5PtrGgh1JKpy6atMlS/v2U5gw/SPXAN7ht2PsPu1fBg8dJiN2X75wmyDVr6 +28364KPaiVqFBzm1jj+u7811MulvqKMqKVHc3LsvDsS19xo2VJLSn1rtJBMrtXoI +Iszh9myn9xKF5GtTToSe5aRjPIR3HMKsPokDGDdTAYtwrzx5vcVkgOzmUqvqEY6K +nqRPYoNFPjc/wVuvlRRiSzGvPv5JSjSjYSFUPdc1cQSkjw7ublQt955Rsbpbg/j3 +6F9ls+RBM3Y1eMp1II15N8W6iUvWooawXUZAdsEQcwdnLiSuL9saREtvhROEexgB +Ng8eblSJAhwEEAEIAAYFAlSUoaYACgkQuOUId2ZHWq9C/xAAij3vhMkISEveEKHA +iZgON12xGYojRfZ5HojZ9/e/fkEhgBr+YvAZ8bL9i+Axp7V6tVV/OhjmR8fzqE6l +EJAjuzin/E1qLAXR+hus7sZCCxJXNnMRSCJU2vmg5v50gLEd2soVPQpCtAho/e5K +95JwhXYhWzwcijPmTlv26U4jxt9MR2mkDIhW/OtK1KlcXbDNE5fXN/iejVCV9PQy +ckKL5sd9ZRXiUsWgeiqzZ+avA/b0i1kwWwau2+r745i9VLlQeojoJU3N77XeXtSK +VILFWGHNO+Ubif/pq+WnNfyJlxMGjl+9jpi8rQzCm6V0u5V8/MlUSkCnciJRdlay +SDuo5UwGExt0jybywkn3A3q5Nvaa1XXn1qR7/EPeNr1zHRu7aIcpQfcVOnLlL6Oy +dYm7MuxOMBPLePCW72pQIxz5pcV+TBbp80lTj1TqHSqtibBtLPSF2FTKJcVwDQPb +MRJUhR4MKHtVdmiQwcXH/mxsE/mOu6SFPxvXuC7B/0RSsvcTVj89CP6Bgy7LT0sQ +TsbRr8hnhDHuy2GIqx/4UExcomcUnBVDL5DtNhdRzbzLugshk1znutoxsGaVwVYV +xfIg64Azf4tmb4mi7D6Adshn4ULFoOo/h2v2bZw3WmeUoEZWN0M4BAWgz4aMrZHn +1ayD0WVSPfsx9DBOSgcsi93lMjGJAhwEEAEIAAYFAlQEroQACgkQ6UHe2nxbtqWP +1g//Tz5ITfYWU4UPzjqJVPCS9/xiuqbNSAzy38/9GLKG7JWDfwxfjInuiQxQD74Q +uQfWgeFHG72a8Z0M4sgx0RQKLDS4skIcsR9WRIXB6Y/IOBi4luU+rglEHeUQ/6Z/ +eLew2yCN5LfdBu/7EzJjn1Tny4YIJBfHL9UMzXkSEj/TW87iVyYPIAG/NClSA7ff +cwKLYBeQ9P4NQSyLn+TOKyIkSHemiaoYjJzagzIqLTlxJjRVaJN9VrY72etFlz2/ +mZ3s3WlRRj9JlCCmSPAf9nssORrqeso44VfJ1nzpOl45/RuLF0vWjS+JtAFtd5ye 
+gx2dfeSeLZT3fKpYx7gXHZEF5jwvHlwWGFopXtn5Z/5LMbIqBZ8+KrhMz4+Pu8dF +LgXgLvXYieN9VPI2MaJNu86lzokMJTzRAhsStHOVyOENRTaApT9JNYTx6d0OlH7m +wBLtiIL3pO7QQBy3lA08HDDY/7pLTOhuETpHIYCT6Ed0DbZKR5FhyG6w+lRVDJC5 +vAt74qmCc4X82sWb6IYpTt8ro8DJTS0F5K+xiTVHGvO3+DEaj0HXv9A1TX5DSe3K +sFj+RbLJCufsPv02dkD3mg/RPoiFFiSHVk9Omt8dWRVYk/WhCPQoNvcrLP7Zj8wu +8QrfGhDvGhxxysilKxEHlauXMAzmzkGc1v55W2lVb/Lw55+JAhwEEAECAAYFAlic +0mUACgkQLGRkryqOTAJjQQ//QgPPLKfVp2opN9ZWm7nFTzUKy51LNbjFFKiiaGVV +Svh2Gz6fcDYSNvsgYr00U6ET4h/aMBPY21YE+sndH5rEyln4Ux77Ul4yzQ62m20m +rzy63MAcMF1dfYb1vwAJWla0M/LnDJQCMSbeYXkvHeJ3DB8//CFKpD6T3UgsGi+J +snefB0l68rcR0/fxRnklmkfZ8M4J5Qpk94E8b6uuSBIQJg2RdNJTY2UKmSloIKw5 +Beh1xnovwGBJdfbwV9EDEupSF4qUfDwFFjxprskdtB+3+8MPjC5rOMW0dC5ojf5q +lWgRLDvihqepIx3n3yPynmSyNCnelAXUShEV2+iE1fe1WZ72f7Q+R4WRYRk5ScEr +mfgv46ssKXGKANq3C2bDEd2hKbS6eyO6lSY2qK1/CxYohXPkKfxjKRrr5BpoNKB+ +3eru6xTeSNmkneYrPBKKbsswJ85MwWCoKU/AVvml21f05sMX+CvJHzVFul/v4x3F +9L77a7FLCp5MJDM7Z1gxetA8otfJJsp/d4JRQteG/V9jwicTsh+gGCPZfY1jB6Rr +Noppn/f8igIjbg05rD4xeavk40SnTOju5J5Vtu+EiRxlW9oFIdAhh54MxQ5zMdlT +hMRzHlp/jWhrE1yG0MBRQfgePTv3AkIG2SqRM114yebE3paUF+Eyxn51EO5nqbsO +w5+JAhwEEAEIAAYFAljimIwACgkQnFA+4pt5PsUvshAAmFvbpj6mRRIpTbqLlPRU +wvDg7gUfQy6GDCGFnju4iKonje/LpQoKqRr5TK7ImE8IDPLuY1sBDJCAKQRa0L9Z +ZWcxqNx+xJrRUIQDXA3O27Pp6B988qUmkZsq3oj1pfEAcjgASKTSxZFCDAcsxecv +B2n/GXuYKpEKM29pybudcUZm0+lAHMFJDZ95lLXpq4mYQddAMJgwKG7wcYZhY9xb +NuMO/U3FfvVS/m1dkM6lIQbAOW1jqj12JS3e27s8haOCZmoLQ0Wr0KuiI17clzbC +RfjYSUQkWipeOS73XcnV/9bfN1hlkCIofF1h3RvxbGyhEB4fnITcdTevEbabVbOL +0agl8FOPcI/0gus6TT7Z+37/4LcYn9unSgmu86gUx+VVm8cR6MTBd5tAWceyZO8z +RuwpOpql+34MwMNBvUjZ1MKwDq7Nq/Jd8KRhFP+Q6iWh6rqFmJ8vxK6eRGaXTGrY +QWW43bngUuUOSpMIgt75efaF8IzK3WnNnJmOcA08DHFUK7273VdUmlZxhVU9feDP +Ywfq6QSAdr+2SsSMvgTVFVelV8YP6L5zM5NCCtRBKH02HsIKo0IdaeGAmZw37Jl1 +j44CZCe6f/9fd+O+TUL4NtXgeLeH4pTwRzEKJ+ublUthGeYVrZiPiuInBMfO3Hul +exsXrj43w6S+q55B2XU9cvqJAhwEEAEIAAYFAlkeq8AACgkQhz3qMJ0TgH/UGBAA +iq1LhU9M9JkdFjDSXDXz6acAZksGcyVSorK98SULMzMzA++zUHgrQKVomXtK8AXR +JlOZ/+JXw2PUZviCYlh1+0WW+kZ+/ZGGe4J3PCO2iIQhqlDECO1PoAxXg3iCLtdp +IJKdnBggLVet0U1JF14H1tyRXpXBBDedga3w0wrMUk03mleamuO1u2fLYQHgtqUx +qYw2WBWTBVGo8FBrat4GANNH+e0INcCNY1qOXuvxyhjrG2WntzX+b/iy+ldN90V4 +elfzs7kEh7BmnkUvBMKL4Q5r5eBf2pFoHeOf6qTGYrqBSHl3idBa+X6bCSH3XJkN +yAmZizyiQwhLLGQbKX1lX3YX1XRZnY6zwa50gI5UkRBCy86dynVGJ0okPIKKeUVZ +AcxxL0KOdY4ulZCOddc/NlSOwnOjO4M+7w/AN2qDnJaNlUxahmpvXK0Yx8aqbI4K +SPZMUifmpbglxU00yvD3y+CctuVfqNHVab4O1Zb1zejY829PAdVOcR82shLM/rqb +GmcVVnX0IH0+BysoCil4MEDBG/DE88Iw/I7Uq7gm+a7gSO6CO0l0Qlyz+ixBoSO4 +sZQ9VwRRKXubwak46DNR+pgq3kG8b1pFakqo8LlkJWAnu8DhgRt2cqeplwbbtKGb +NEW1gi/1xX0kTy9TNdiVk/0C72lzYxZBdim1yH1spCW0JkxvaWMgRGFjaGFyeSAo +T3VvVSkgPGxvaWNAZGFjaGFyeS5vcmc+iQI3BBMBCAAhBQJUAGKmAhsDBQsJCAcD +BRUKCQgLBRYCAwEAAh4BAheAAAoJEJktI7OS+eTyg0sP/1gZ2XBjfhdSugEmFDWw +4TfSTsBZAWzEVMomouZa/r2GxGvfiQKwgGGDaw1u/XvIfdjEDdwEk7L06dhyF8UB +jM4WidHrz+DYC9JRIxrzcNDaphhKlmXQzm6hZl0PEwvJ4MjbX5acI9MytUXcshkX +tJCeVUoQ0NnhD6UUvaVVsfJtWwDDR9RfzsHvegJMtLUt/GKfCtwrZTd2HiNrSpW/ +TPys718GDx9gpRbYpYJttueeOCd3rYjMdU3EtdR1gcWCc31Ke8iG5cLFH5VdiMn3 +bSu8CskCxSQ+bjvDHzgrpDNfq4LEK0i4QFnK1EL4A/U5/rZ+ql7hUDtad/wKNz2j +g7qJtuuNl5dMObFv3f/uy6tWiDqHgGLipwhZ4U9rgHu/nvo1869CKDSMj8GUOEng +Cw4qvvUEbLSFWequ6zywY9eSD3vAhWzdvN7gZRIxqJswoNiHQzQPAFROd6E6r3O3 +octyXDZ9pQM6AT8TbVSsmy5Uajd63OTW8PVvCWCyVLXcXNN5R6D08ue3ma05yFzx +9E1RiFZa7Jt55TAaidvuny7xm97FU2Pj6BOgdBSrcklorr+iL/h4FCEM/YvDSXrc +YeYgTGzGRMgMJBZjUyQiVaKfm2NQfcmqsIhcxsaJqnDyOOGpgHuWrVgmTG0CMbWD +48mwSaMbtRFN6RPKMDwwzPVEiEYEEBEIAAYFAlQAZvkACgkQ8dLMyEl6F22SZgCf 
+TTIJzMChBBqW83DPxgdq5I0hnJQAn00b+s5LqUEHDQml3XTs0moVwM90iQEcBBAB +AgAGBQJUBDQsAAoJEPnq46ZYGdfoByYIAIYR0LvBD7egGMFRADqCzi1j0cRzhZId +xjpARKWP45CsE/Jp18SXS+4YeVXPh55/oZSbIf7mdGLtDUi9pPP1Npkuxep+2IW7 +G/+u9KmGfNlviSilo63uT/53bSjgsLM4BtZACAG1jbbrozePCfUadlRMkVkg5hNJ +8AThlhGhFgzgOGTTFbOpSjUA2aCRNic0sWIS2yyMc5C7MSkQNpX9k3U6j+tpHkIW +9tIZC9yS51giWDjaCV1VILr6WgOS5CxSlbUcq7Cyt87HZccBeNRqyuEebkVb42GB +uxU0/dhSo3dab5JGknTLCphMKnWizqulPGxBxzJuvxtNIqyVdU/lWGmJAhwEEAEI +AAYFAlSUoaYACgkQuOUId2ZHWq9CCA/8DH6LW+J44g1HcKfOWTfzIYeksK+jDN/e +wxWS0u8LJSQ55cCXn3I7a7xiUIWSFL39fg5RHZaT/me3j4L/hwDuvabdY93VrPzv +MivXKCAG2uxYbKlNudyw/Q63atApwyvtguD3k8+BgbgcGqe9+AUmhnQTJg92FiTb +7loXw5lmJ561RcCaDA44bz+eWiEwoeUbwIDyJQ+aX49Zk1jjiTM3oL4l+TPo+p5i +k2TeVnJ4O9Q00OVnUgus9c54j5XZk2IRAyE9weeu6ZMHeoWrcgeCfZ/0J954t2Sl +JMGNXTtPMICOdGiq7a7+EbB69WKKlw9Wq/+fXXfwz0/nls+FWg3kt1mlkkbDenDw +KwjbAf30IzBEUOm9U1KPIlH7KTYkEm39Z1zFlTuNf1UIv/9yVROgHFd8cIpLQIfe +Zjgl9xyY6fFINRX30U+mbnZwEyLOaAd8HHlnuRz71FMV2A0hagSJELTpPl16Vkw9 +5i/CIXpkUKk1M9KrQQcQU8wGcZQGtohOtSFWSSu6haDDfOVthDC+78eUBo9kRG7A +xBqnvFRLd5xi5QB5GJcosIP97SkKGvUctX9z7YVcIBpZn82PGXQndjeK5Ud+M/lo +dg4GY1g3cxafkIKj90hiiN+T16h4e77VTkyZ3sz5coO+4YEGrAOLdriTTyt6fz2f +Rx2waoIgx2uJAhwEEAEIAAYFAlQEroQACgkQ6UHe2nxbtqUWoA/+PrZXuQYH+kPM +H3XqpbWRC9yiA4t7BjoQ5d8P0v9YrqXTr5CbA1sQG7K2+jBR/bfiOl1zVpeBXwK4 +mUUSPKVGuQR6yMFw6PzmOF1n1lESszRI6/OBXP7rEO1bHSwO34u7gfQwxTP7lSiJ +fBki4Sct7HMoaYp47y2ejzv3/79dxv+86ps4k2WhX3l/FkdGcb9KgneQRIocRkFo +0rZNhCs60/z5mhbxcGp3DUPZ3I0/EnKztpNvbanOrtPUAJnVPWNRIhKpwYul2eAD +Z978CqvOr73dRTWJWSWitYp6U6BNL5EszIcyYW3GCJhCTfDCn9nOO057xCvCKpTQ +M9F8aXp+OOWY/x+gR5Ah42impIyO0gBO7hkJzILZ8Jx8eskguJWymZNIP90li1Vx +Rv6WgHILHAbmjA18ZCqdgVyhI5t8aENPgR/MhMiwJvtM4dYuofkwDYwUKfukqjOe +cxEAuVjMZjAj5KICicVUI5QwKss5sGKlpIkITFvYT2/vCyLk+cQ5yey2LrhASdRu ++eA4G09Ns9Kt6+fBtG1WA8lcs2BJi9Wv4QNensxWVvHz0s1mcvKrSFRoNaWiqqr5 +Yx25ad1y0SWbuy0FN6u2R1fETEFr8YVnWZFqx0o61llG+S4bcpazTDSeGZlJs+R+ +JeLdXxFE0nFR9qI7xnIWCi1dtQ8GWEyJAhwEEAECAAYFAlic0mUACgkQLGRkryqO +TAI5BxAApWBrfTucOPzB3oEZMUZ2jnZVR1FcC/15uKr9JX5nhAwS9Ux6kAFNgz1Q +JzYTZFmMU5s1NGEN/oIDIj18Kq6AqvyxxpFkp9Qp6/96x5fvU0KZmzsFSNhp6CON +fjMLXBK2yaATj+8aiXnWhRjMrb+xQKcIBe8iDStoDeD1LC6uXQWZCxipH41KnoD7 +XN0+39snPTN4MDUq5g7hoUREigGkn3tgn5EzwG281J43hRZIA1NFNFg73tC6AKWz +3r+BOi57uohpFkJT5oDpsBWrEDbaffZJnC02v6wUBixbu8vF4AQtofkws2x7UhHd +Gf6lK6CG/0RrH3Ftl+pwhchRnN6mu87Vvd8O8cMU8yQ/lrxCPuqTr3n/b43LzWWi +ss/EBFOOaLD7nu9CIGN8LxRbAzO5N1kGPC8NDjg9ceXGRIwse8BhDxMH6lkD8xT+ +XQ5566sqV+ovPeyPu/kY0s9aMhkC6OFIKNGzodquKSDpBaJSKR3tcEU/fXGpjPp+ +I0DCvxg6DEVlueWdd/doh3pM9+smwyJquPG0VCof8FiGIQ7/nkQ0z+ikFFjTQUxm +OprnYnUPZYA/y6xVXvN1BnwBsb/YvCpQGyp6SmRgNdpYJzv6kakpk4hkQeQmQpbY +8xWEesmdVxF2tv/CxGFRVfJz3tgM0aEml5oZ+5XLjw3UVB21LxKJAhwEEAEIAAYF +AljimJsACgkQnFA+4pt5PsXzSBAAvrpN34vvWV/odVK1Ea2aap6CVCkVo2y+IWww +K34t4wwq7JVHxSlNy4zpbIQjPjNf7YeDX0kB+2ePqnIJN//u6kjnBNtU1xFk3tQr +zoK4bB5z6q7189F4urztGreECPar6Rrf2AKAbN6sd5KG1R1l4vXhhcG1ETrc9bug +kD/SKccfid7HBGGcXpb/qhtTUhzPMns+IytH0SQwn5DFyarlaAP+fxrfaHFbJlmS +4dltU9MR8Xeo0i0NGHEHGMBlCU9bldcIIU/8iR4JB6cCJjkREsF4Z1zQBg3y2uNk +F4nwgP7rO6Aeomuf92NG/LYAIB7iwrJpuFhsxh8ZYikKdOg4WNDElXsXQwfvQqIb +5QTCm4B/lcupXW6S5iOzzL0rBsxhR4QliHi2ibHHT/CSkOLyWCGOCIX9y4zLicwv +A+FueBO9rxukhIoF/J9Vxe5rRahTOoXlv/8wWEgAOIlQ5FLzALoHTBEGmDQuut2y +8swcGn7OrdSP6BxVStikxjApJWm06zSDxo716+PeLK36RtbFFX2vR0OsuZXFwbfK +6yDZFjFNlyubnbaiH1YsvkgbKkHxhvQ0Ncfg5rf5H5iR88bHv2WCrVN8qoHeKOmK ++fVHT3NmBokIpb4XwOPgdn2+3VguiZjxf0Vd3rZ3+cojpXWFimDG7K1Ht2oWQbma +eFDY+AyJAhwEEAEIAAYFAlkeq8UACgkQhz3qMJ0TgH9kSxAAoaDv4nMTaAe632wo 
+6peEhRv7pfUCbuLCjXLcMA1w64FsH45k94wZeeOqwNN3CdeADd4gpqUsta6KoPmV +fDphz0lYX9vz1LEbzMOK5mrUwLXyRNy6sgO50xfBLY9rjaQApzeaqc7JSGLNvUPi +clnoheo7AAy32oQf/O+ubW4eJAmKWsPstxFRET5snwoSWSm7Xx2Q5HZo1x5FureU +CFrX6LVyYJTF8v7C4vMRfsPNjXNM6URqURUxYuEcWpPBzhiM5ZyYlm+2PRCSi0kc +QvUTf9ro8DqRTjZP39wIqOb/Xpt/FyO/Rwkh295477BT5tUHuCh7bKDf9B7HzE7o +Ws/LbUPKn0B1lUfwCTQmvBZu0WGEOjw9+wXBO2ucBqGfp7UaVWlSzismy0lC8jFq +Xpl1hGtp5/s5WRyxjMEKVa7x8TtNahJmfWUPf4bp465Kpjf42LYp8VnPL+b0sb1+ +I36DS8qfviDd2zMXyr29DDmXaDx/16nlLsNNe47NU9/jA/5B9vn52eS7IQNyqI9+ +bySI5oV7g7x34D0P+9WENY0De3V1QAsDL/oOQjtH5cK1v/thmK6TWVHm1erb4kak +BoNz79U8/HsgiCZSbPDsvCnzdACXb8fuqRkUmkYYZd6alBGiyFBzFNFwIIaDdg0G +FAy3qkIcSv9p4W4ViyEpkroxDr60JUxvaWMgRGFjaGFyeSAoT3VvVSkgPGxvaWNA +ZGViaWFuLm9yZz6JAjcEEwEIACEFAlQAZkECGwMFCwkIBwMFFQoJCAsFFgIDAQAC +HgECF4AACgkQmS0js5L55PIAKA//SL27KyLfbsNAEDYYsVpyhxEDaZbrdnbDCCK9 +9MYz5OTmfbpng01bp10PS3qzGrJFaRC71FJuYFWxkLxatP/OPSmEiEZFROFAWs5w +TbgfaAQqQF7KLksRKJRNeglOiaULgWVDO+aHvXjfkMGm9oGehHYv4oUYNe8HrUrN +lpKkn2DoNgy8FdvhXSm2OhZcASAtB8d0+VSoFmwNGTq1LMUkne1DAVpSHsC2ArfW +rFlskeN4gB7jl1thiV0Ep3dfr/adImjVtLgvJgnSO+DEuqg98c/4QOZedR5BBQiA +aaRPclvSVDcr/OQrbVwfagl99PKowfPzJT9xW6Jy6L+FHLcRmWw+2dVcQzzTk/32 +vQnHDzhGYiGLG+Vqroe7SWoTiT0pUfZ4fbf/vXNtuxEK6gwvxgnHSo/6yKX1M8dK ++v73j4kXG6obDJ5uPvuub7NcGYYHv3zRvNDW1e+CEVF/Z4O6lMF3Ng+No5CXOzIj +/INHzyy20/sHsg75omFpBvV5fcBqdLkXxmEfTcUoQR5tZSaha/i1qpsS66DRbMuE +TGUz8q5Nj1B1Uj9MTTn33/unTzBRMocjL1WTYcXsd4RiEmh40mIt/q6GHmkMI0TV +9IQxFmjWeJumrYUbwT8Ib+/GHeWYIDnSuhO6JDfL5F+VD3iQTHNYEPGbyfnSpCAC +grfHrfCIRgQQEQgABgUCVABm+QAKCRDx0szISXoXbRXpAJ9lKhoceJC20XcVNDVh +l6NK0wtMswCeKwJLXKv7i1Ueby9fCqlCeFYCvLOJARwEEAECAAYFAlQEjU0ACgkQ ++erjplgZ1+j3JAf+NdHcvxrbH3yy7TWkHA+r+dBGRtg1vv4esL9tDF97A94PMPcY +3KteXOTw6mR60zz0/hCU6cn8BHOSgDly8WOHF/1nhNB2iVKxQ8C02NJFQeymWRoO +dYO6ePYGDphNubuRKLUbQvPsJFNT8VPpoh7WdlfKRqseqKWMkpFTSq+gfH5lkFNn +xpVQ5zAqFIZIM7C6qMZ9wZXJqbwcdD/PhfGJTWjX1Bft2URdBeGHuouA5QSC7mXm +iLS/TXzR5J7PFS5tk6Xs0QNdpYk+Z4U1FbBFl7RQyh/bKUuzMvkZreCraaH7a7Kr +WISMm09MtbTIpiZ+SpdGQY3916bUJjExBk3cjokCHAQQAQgABgUCVJShpgAKCRC4 +5Qh3Zkdarx9YEACtFBpjvj15FqumoU2+8B6ohrXJ7rvS9K9NNzvGkcS3+n5rJtt9 +Z6oUug5EDRWHJ7vV26/yzR6f5F+yun78TQZ0H6GEQUNnQtsMQ527CgxmRPFo2m1a +HQpJBS2C47JinF8Pt4msP+9639s1X+VIqXasF57z9uQ6ezb5rhIyg+n64P5d3Jc9 +o3Hy+obbXHgJ0IVQfACGITT1DWJNgKpfs70if3HlUV/Xznn+e3kC+o7oX6ACYR4g +fO4Gy992FGgA/Yjk9VbnwbXjFeugV71TdWTNeLbXPhjt7wS/PrYcZqer3Tpj3K3y +GdhJlAghXbhJENtdfsNzN4SPI2lDEzIimN0PeABf+9ldbKLs+6XU2yXNHdg2cX+k +uPddkeK9Q2TrZIFEZKGjOnQE59vyDC29wBjZECB14EV1pG/2CrEnkBwU7poeBqd4 +N6/l0JCCCk77s9DH5cUtlJH7qDmrqY3Duv2snYNbYfl72TOsSFLM+/fN4Ze8eNnk +TeTrsQbrkOJHU9sOjeuEN9GNeavcRIU1aRIIRofE1YpdNbzk022x4kvV8uCFAby2 +gewucmgWDFTID7KfQClGufAq4p+jjq2qfsGYI2pV+b/pl1nfbNgeYBFD4bJpdyN/ +JPcloThSjc2ui6Id2yTFWRQeEWqALuq5easvKTjXiNixHLVtyxstt9gZ8YkCHAQQ +AQgABgUCVASuhAAKCRDpQd7afFu2pfKkD/4wsbCOka5+BtHjQCTOnk6T1Ld26i7y +XbOq7Gnzim+t8ViCc2jNwVpiDeam93OJ4kIJFOg9dUzp9Gd4WOyfdP8hsKBpTxCk +WjORa8sExpJHAc3F8zhIlUgFrBwOJsJ3hZz7MD6PLF1EEo+9bsbOgLi65iLT9yfk +WNFJdYE4OB6Hzqwch7DrtYfQpcPNXPfwMnKcrvMnMvHCMRCaJmEt/qI7hrnnNwF7 +h4QwxvH0fvkzq48nryjbiCrB3GHaD2tzrOTZgIAYcR4iVLb9PfAJ79uTvZJlCOAU +livWTo5gPHO2YjmdQnI8CPqQbl6M8fmLLdvePOsS+icO36bBGC3+ZlE0UbUw5M4M +Hkzza6QEYL0MG2I2lkrCJKzx8BcoO5lAZy9cP+mXCBJDtIZ2StWGMmszr/egjlDI +yE42/W4nIXE1qFPP366wa7wTO2hXHvH/AUn3oEPMtJsQRgRG9aC8C0GJ+ufepk/K ++9aoxo/TsZXgWax+EvFhRZ/TeXW7ZIeKGP4U4rt00rB9M5XWvbkxVNNUZ+2Wb7r0 +w9kAZ0jtqjpW6SHgzaJ5btzvgLw5zMyMIuBq69RjTqOHbtZ8i95w6iKAyDPKOmfd +c/4d6R0XmCt4utSn/fjsvf9ZWKGBrIFRi6UwBcXjcSGy749bbnN4RdVtAddBAzqz 
+vZFidRMUtEgsI4kCHAQQAQIABgUCWJzSZQAKCRAsZGSvKo5MAjtzEACVY7ze4P7J +qhG4k4K+2AVTNFBECsQM36NoVIaPahUbG83RJ6yg7XmeVM6zsVT1vvbFwrRh/i9/ +nzSoDtZabWtxkeLbVIzUKEtVz5ZHdCaAhu5ER+r6Zke4S1xOdiERVWUJvhcnVeSF +l3Kgae3WNoMOayXQNZyLiyC3UnpQZV+d+rNPmLEja9NTrdxx02QdMxOlNVORTtsn +wcIxUTagAL134HUu2T2QmHBae1f6JsnK8pgLNvmCDIM3d0O14mdiA1Y4wJGwjDGf +znaLl98xhTxI5Gf8ymC+hMuLcyg+b4yqoEH+8eIzBkfI5GbBsBuswcXId+7rZSit +MlANektOpwJQ7wXhSofofwxttkYHeTJWBDpTj6TkoO4y5UWwATXGg2rapcAQy5Ek +yQV0ZvBIO7UQPpiEZQKALABxcFLerVXeyutcXXNhtgsOZboAlB8sbDUsE4GHgcKE +iLa5gRWAvod7LmrAb4k/78ZjAVdCp6bo8J9aXR8sJdlVOM8Czqa0XX20h3LXkiuV +Xv4+kwb4lU6w/OwKzDZzdOv5JQdV6/AYYYsGxgZ2EtV5tAzTn5vlVPYd1xOj9OHA +2hnZwExXkWy2huByJkF7AYzZu9nEykhGwU4DrXr9EkOAsTmy/EC/En+4Ue65jk58 +m+c01Mh5vg+9TcS2dAkLUbUC6Fob/dB1XIkCHAQQAQgABgUCWOKYmwAKCRCcUD7i +m3k+xRQwEACqhzGR1UjUgn8Lwgu0kupBmb/gTlyDk8CK6kALMimpf6AbDnc9yaxi +uvFXX+HgGr7nT7FdcBmM2a6ZX03xNqFGNNxdi5gLF14iLyo/QqBD2D7rVWCDLphc +PLBvgb0+dP5/AdK7DYQkppFJ83lWW1citmKwCuWyQ3+bn8D6CqgwEg/U/TO7FalB +oMaad87WQ49YN9sFrkeF4wILsWO1lwMSVDhya8+/m9hxNvSGMh/twAP3DB8qgySQ +AfxDdL0b+Al17wSBH38xSZJ0ebga03vCVwCfhSgjbGHZQ7i7t0SWJhq+ROlib/lb +s/Lc22DCK0l26lR0Fd7thMyX7y1nSsXeVNGZ4jfcmfYUwhsky0w/eB67GYIliWj5 +U+elcrCfXIQd36ClEVRV/+rR736mzx7/bIzFFshRrCThRwUgpF5/x890nRVtvRvF +hvFDIYUjUXru6vZJBQx3BjW2cTCvN6XngoVWbrleRUU2jiLujBihUdhm6U1BGyHW +BmAQ2O5y1id0Skpr5r9DDIXrFeBVvq91mJFMsj1xotIXELKf5D7JX4qrR0w02TVl +pw+OUrbSWMqzb/GK/nap8XzNp8jR/drfTsKXfOS++uJnIe4cry53FRZXBUlLCDV7 +NT+TucdmqdObGsPNOCZLX7cAfl3/R01CuNT3ur7OBKRiyuKbGzt4cokCHAQQAQgA +BgUCWR6rxQAKCRCHPeownROAf/BGD/0Va+RV+/mCK6bMGkhGiscFRmGLWgNKxsAA +2JIT33kLiL2Tn3xdYSkdYpaFmtPxjfOs0KEvDPvWqJyZOFARKv/8BPHGW/+wPlkZ +ON+B2wW7KzuKvN/p7xOdDrSTMZ1WXD8jbQILUNLzvOivTnr7FFCV2e0c+jlrTW9W +nSXfcr7HedncC5aqMLb79DirqNEqgsjDUwKLw2hGzeXNs7YFXlbibcrR5KSK75Pl +tlOiYW/bXsMiOrh1KGP81Mr4+fbn4L0yNGiuHGJKxn9skcajrYSzlgKZkURd/9yk +32NpXpBq8d0eV7l+dZrr68HftsVWHDQbMcvP1jZxtJBkdC7mmk5S92a1VHwAeRFS +6RSc4ge5QEOfowMJsth5R27b6xqiDuPQ0vqwdgYGyD5XqzO0oMTx3o8p0VG2m1VI +ACgqPV0AOepasPBnntOltFZZxJVZVSkgXeMADXjXeTYOuH5iDJnJH/bNUgjo0uD9 +dIkMBJSzd3TQFMs4NcqmHmgDvtH5+wgG5UcdpPOTtGJ2L/YMhqgqy5UqiSPB4Uek +eRVxBUhjCZz2qYWCZfgBzyQu6N5jfdbTvZdLcP+oseWcLCBPoMU5RSLILxFPMHl3 +W9+H+jGzq20u6X1u3Z3zJGxPoxaaZkAL8kIwR+E8uAWywdquljAX3mc1yzJTBPxd +HnV9lz/TdrQhTG9pYyBEYWNoYXJ5IDxsb2ljQGZyZWVkb20ucHJlc3M+iQJOBBMB +CAA4FiEE6Z/+g99z5y+2smTtmS0js5L55PIFAlotvG8CGwMFCwkIBwMFFQoJCAsF +FgIDAQACHgECF4AACgkQmS0js5L55PI8gw//f6F/8QdzVDopGcceaDM1AsPQpl5f +JUjcmJwZLhXR26/+wIZ0CcIZNc/Gys/qTALuWb3Eeqt0sDJX+W76ZxsUYWBk9hz9 +175kJ9IMQ2uz/+JDQRC2TkU00cLcnwbV7gNqy+MQWoW2hLLJP4MXdEIUs1ABNX/l +k9FJmBfV6w68X4IaJKsVTmo5IBumJKCAyxpGLrgUSDeYZEfGkObKlYxk9Rpw5OlW +hhGf3WyFER+0etWlGn5DjW+UkiuiOCQ0gw+50maluVG3+Rvao+YCdR8mv5tsMLPy +KOPKrnc5ShZavDQ3zcOHvoI+GSV7pYDO6VUQyhxjUjkLU/8atLXsL3CTeCp51g3i +eE4nHtef47AIZRo170zOqbpeH7hsgrGcsWhIIkMFnscDAOEaiIwWXt/ZGSBIhKGM +l1HcPMIDTKLgBONUinFupGpHqv24swwt3b4M4u9d0Kn43ckuOa42MYKlFmyHctO/ ++FDrRnJvtiFH5eiXYYHGhdTIzfsoPcDPeGqgDgOjjdS/XYSbufLY6oHoCHPuF7yF +m/lecSQShQqbzY3bCtN7RMOEvbHR/BYcncXgy97vOSp/4cefK+zw1Rk3x2B/18d0 +GUn3gwpxMsog/aAKdhmftI9cTooeMlGF+NhQxYgC6s4cOQ976AFBLn05xYrd8Z0e +hKPmFpwExoTyEq+5Ag0EVABipgEQAMnduvH1p9HE0nrtnWlddBCmxr/uZ+JcotfV +z1XrFj9m438FiKy1qGlb6JXaMbr8vBAqqR2Pu/PfFjNl21zby9Ek7kR9yYYlZhem +ksObehmIX5YxICUl5RvXDjfLSdm6OR9YbpCi19Wg0VXsEkQ9v/kXVS7D6XQXAQUl +Gu+TcVqltfwdtQvTha4qQaTXnEVPwdSwqI3tHqA2kALWG/c93HZmeuQs3FX02MDT +LN4uBR9DeYfFGGnIYWt71UeG2aXf5axyOi03XZgKDw3zpJGwukdVH7tMFtfT0W0r +rTVDkMdtSXMVrEOHpT3b0j81Z3Ma3fbarSPKgTGr1eSwF2WQBpNrxmqQkLawyzDQ 
+RPzXN9ge31k7lSkUFUfBGm6zJ/SlPnmVHxGNGAInozXN+VX+AjGKGvTOvT9qUXql +GOTMeCCNQLgH0QexfUfI2vpk6MGtcfxNKhLGNPi/T1HCy+QdFv3K6qKtq4hDZ6mx +FfB4eO8xRa0GYkI5/cPY46XiBATxBx8A1Eo+O/7vUrCBONAkxTCQdas3M/0mY2Kf +lmuF3tqNp1wsRRVruVD+ghcUNU0uJucDRLfrSqm2yOCM56LRlFA7uD9fpBIUDV7h +u1DRlkIhM9d+CaAYDdJQ5PsT9ddk3h5Ej1DRDTmCfCxZyjpWR6CNXyoVVeLX6Al2 +Hsa/EVS3ABEBAAGJAh8EGAEIAAkFAlQAYqYCGwwACgkQmS0js5L55PJDDhAAkEzd +5X/791Yazg40tVQvg9LsM3Ak2lyOzxS76OTCqIbtdhh74uFrTSrPVIKIf1gFxv6J +KzjRk8KRmzzKqlLGy/Fmd7j6C28bsbZNVqryRyJ+zFTsaC72x5F6wX1RTvwLMIO7 +RVXwFkKKegIrjMr9n+mpCL8xs2JVPEM7Z7U+BrOQUER6QLWtpFnnMtPUI/k03Fav +K4XDI2AI9+FH2KZ6Fb0vjKT7Y5ToeH1LJz4FS44sZfQlA14BU6JvsxsuT71iCgRG +B0ChxujLBNTqp5Qvi/xNv8bg1Rr1Wrtv9wohQ5JbDtC29khmbMeEg3YRU9aO2/NY +o1oaJOaVm7mFD4QyD3Ek4mgOH/PaN9K/V9ZRjR1vacybBv9aF8ngRaf4glmBSdz/ +yK121bErWfqsQsdH3WxAhrKgF+lojaoaIZO5Sj4+y34jEbafxd2v9j594bXPSqvn +jRqI9WG50Fll98PNqzq5hqjLq/fOhREin/QrYc2LZ+P2+VTSsnRU6qaokuqo+T+p +4Glvij6RGKNZ+A/Ogv2QV8in2BHvQ57fAlSn9VLdzUnQ2jxmhVNKmwM+ITIjxkGS +Ya0qJXNVRHKMfHFoUWa5Zyck+IGBKgLUhJ4gs8SKj85zRbLvrQydt5QUVrO9319Y +DJU+9WUOomFaLd0GpnVJJhXofslisnu+0AFLLlo= +=WumQ +-----END PGP PUBLIC KEY BLOCK----- diff --git a/admin/tests/files/ossec.pub b/admin/tests/files/ossec.pub new file mode 100644 --- /dev/null +++ b/admin/tests/files/ossec.pub @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v2.0.19 (GNU/Linux) + +mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t +wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU ++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM +KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB +0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg +u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2 +ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B +5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf +CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7 +hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf +yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB +tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP +RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS +m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r +FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/ +JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r +q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn +5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg +b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi +AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW +iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3 +cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai +R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj +EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj +LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6 ++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC +vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq +95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf +T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls +9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC +KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p +N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21 +OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h 
+h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls +Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f +FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA +6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3 +inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA +KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn +IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f +4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK +n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ +BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF +z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI +k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS +iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ +Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8 +=XVz8 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/admin/tests/files/sd.crt b/admin/tests/files/sd.crt new file mode 100644 --- /dev/null +++ b/admin/tests/files/sd.crt @@ -0,0 +1 @@ +TEST FILE ONLY diff --git a/admin/tests/files/sd.key b/admin/tests/files/sd.key new file mode 100644 --- /dev/null +++ b/admin/tests/files/sd.key @@ -0,0 +1 @@ +TEST FILE ONLY diff --git a/admin/tests/files/sd_admin_test.pub b/admin/tests/files/sd_admin_test.pub new file mode 100644 --- /dev/null +++ b/admin/tests/files/sd_admin_test.pub @@ -0,0 +1,30 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBFsEIdYBCADTtq42Xsm5sbs9AkxdgN9UA2FQFrpzlc0l9Q+FMZp4WJKADqQf +4G4RE+Sbw0YuzhnWOpn6OdKsLGUO1bdO8QArU+gX5e63E/RkXvLjV8tc1RbtNaZS +seHOCau8XKgsT3CfSIiXCju7TyJoTarrbK//A0tM5Lw7WDmwHQEVmKbJATVripC6 +7IOTN0hAxo8Bur+lVvn4VKh6AW4H0eA2QA3XzySXMrG6Fj6JtgRISZP5J44RI/AS +lHx8pyO6vhO2jTtnlEByDzRd7Hj0Gpa9KNrxV827VcHD1Ql9dxrN5FwpUyRFBiNB +92U1sjctjpe/21+KBm8BslehAudv56n53+3hABEBAAG0DVNEIEFkbWluIHRlc3SJ +AVQEEwEIAD4WIQQfVEsxyEXWmOsx8v82TxFi0y5+WAUCWwQh1gIbAwUJA8JnAAUL +CQgHAgYVCAkKCwIEFgIDAQIeAQIXgAAKCRA2TxFi0y5+WHr3CACbHq2QmnH++E8r +wf9ibauGKo49osS0PDz//UtNLx0vK7mOcpcvaMyKQhX6fEdkpivYCMNLH5nBHTad +51gtPqI2nlA7XMX8F0VKtb0I3NHrz9a41CiIdrBzJQDld2OJjR6HkZ7IeHRZaH+P +yq87qc39sbFaBXZPPnvWvXG+Yt1qMx7Dt1BII62Jxo3oNI6hCHNr2nJ/tNl2PUjU +5JYcVN3eAhM2P5NRlVZxurNBWL7PJAs3ErUDGusM0JZY/TVqQqBZfW/2YnQH4Juo +kr+zSC+86+gG9ZQqftcGW9gQkJA4yLYb1SF9XpMY//2/iVEEH2B9nSoSEpj/Ks77 +CNvL4pdYuQENBFsEIdYBCACnvYC8SvIhJZwBxF7+aLpPi9olJuQ0NwQX0y/OTfhx ++Ku1HL4M/+bTGX9ml7T4Dx67OnJyw8/oYuJPx4mEKhKuTaVKGJkCeuSeSasAP1E9 +XKAPryMpFMJgxtb+mdZrb8twP9ri3o0nRFBrLeXE99tuz6RHzl7Si4qZHXUvmxPK +azVv66hmvnGo8FEUzKnYx4TeVo0YXH/Kwzi6zxWPn95zZu04FwC5QO+XjPp6mqOZ +EjZOyTmtj2E59JqPO01R9/lxMzLOGWOsr+QGGVHOOoVJdGGrCEJcI0gCnEZovZvu +r6A0gFx2rnqyCW1yTR77gEMwluXzTvBQu7pmssrDTRITABEBAAGJATwEGAEIACYW +IQQfVEsxyEXWmOsx8v82TxFi0y5+WAUCWwQh1gIbDAUJA8JnAAAKCRA2TxFi0y5+ +WKlOCACBQr6AH1qbd5ykuU4yDEOUJ/tE2675+WKwJo077wDrnjOVO1pIEVZ9cxBy +YL+CMP14GpuVJM9KupVunqPXab5l+H7QiU2wKwV/X+/GNBKG8/zt9mRZ54AtKKX0 +okfMhoXcg6LhfJHw9sWkzzkd4XQeejr8IxVB7lrq8C6ddNH7GtzVG7VfxfdtGqm5 +Jst4c06IR378OJ/O80fNGTuqqytgPdZtXccfmlpRd7GmxqjjeWacOsL53CO65rQ1 +bIGOQqhRqY+xzNITHKMmmXOhv3znCEmkuic88m4PRMYa1FiULnzVN6MCUfJblGkP +Bc8zgTA2zM/IHIKrWImML74bjhe2 +=VjwG +-----END PGP PUBLIC KEY BLOCK----- diff --git a/admin/tests/files/site-specific b/admin/tests/files/site-specific new file mode 100644 --- /dev/null +++ b/admin/tests/files/site-specific @@ -0,0 +1,20 @@ +app_hostname: app +app_ip: 10.20.2.2 +dns_server: 8.8.8.8 +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: key.asc 
+ossec_gpg_fpr: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2 +sasl_domain: gnu.com +sasl_password: passowrdok +sasl_username: usernameok +securedrop_app_gpg_fingerprint: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2 +securedrop_app_gpg_public_key: key.asc +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- en +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +user_defined_variable: "must not be discarded" diff --git a/admin/tests/files/site-specific-missing-entries b/admin/tests/files/site-specific-missing-entries new file mode 100644 --- /dev/null +++ b/admin/tests/files/site-specific-missing-entries @@ -0,0 +1,14 @@ +app_hostname: app +app_ip: 10.20.2.2 +dns_server: 8.8.8.8 +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: key.asc +ossec_gpg_fpr: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2 +sasl_domain: gnu.com +sasl_password: passowrdok +sasl_username: usernameok +securedrop_app_gpg_fingerprint: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2 +securedrop_app_gpg_public_key: key.asc +smtp_relay: smtp.gmail.com diff --git a/admin/tests/files/test_journalist_key.pub b/admin/tests/files/test_journalist_key.pub new file mode 100644 --- /dev/null +++ b/admin/tests/files/test_journalist_key.pub @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v2.0.19 (GNU/Linux) + +mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t +wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU ++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM +KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB +0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg +u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2 +ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B +5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf +CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7 +hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf +yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB +tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP +RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS +m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r +FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/ +JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r +q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn +5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg +b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi +AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW +iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3 +cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai +R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj +EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj +LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6 ++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC +vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq +95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf +T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls +9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC +KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p +N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21 
+OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h +h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls +Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f +FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA +6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3 +inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA +KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn +IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f +4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK +n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ +BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF +z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI +k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS +iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ +Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8 +=XVz8 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py new file mode 100644 --- /dev/null +++ b/admin/tests/test_integration.py @@ -0,0 +1,840 @@ +from flaky import flaky +import os +import io +import pexpect +import pytest +import re +import requests +import subprocess +import tempfile + +SD_DIR = '' +CURRENT_DIR = os.path.dirname(__file__) +ANSIBLE_BASE = '' +# Regex to strip ANSI escape chars +# https://stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python +ANSI_ESCAPE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') + + +OUTPUT1 = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: '' +journalist_alert_gpg_public_key: '' +journalist_gpg_fpr: '' +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: false +v3_onion_services: true +''' + +WHEN_BOTH_TRUE = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: '' +journalist_alert_gpg_public_key: '' +journalist_gpg_fpr: '' +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: true +v3_onion_services: true +''' + +WHEN_ONLY_V2 = 
'''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: '' +journalist_alert_gpg_public_key: '' +journalist_gpg_fpr: '' +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: true +v3_onion_services: false +''' + +JOURNALIST_ALERT_OUTPUT = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: [email protected] +journalist_alert_gpg_public_key: sd_admin_test.pub +journalist_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: '' +securedrop_app_https_certificate_chain_src: '' +securedrop_app_https_certificate_key_src: '' +securedrop_app_https_on_source_interface: false +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: false +v3_onion_services: true +''' + +HTTPS_OUTPUT = '''app_hostname: app +app_ip: 10.20.2.2 +daily_reboot_time: 5 +dns_server: 8.8.8.8 +enable_ssh_over_tor: true +journalist_alert_email: [email protected] +journalist_alert_gpg_public_key: sd_admin_test.pub +journalist_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +monitor_hostname: mon +monitor_ip: 10.20.3.2 +ossec_alert_email: [email protected] +ossec_alert_gpg_public_key: sd_admin_test.pub +ossec_gpg_fpr: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +sasl_domain: gmail.com +sasl_password: testpassword +sasl_username: testuser +securedrop_app_gpg_fingerprint: 1F544B31C845D698EB31F2FF364F1162D32E7E58 +securedrop_app_gpg_public_key: sd_admin_test.pub +securedrop_app_https_certificate_cert_src: sd.crt +securedrop_app_https_certificate_chain_src: ca.crt +securedrop_app_https_certificate_key_src: key.asc +securedrop_app_https_on_source_interface: true +securedrop_supported_locales: +- de_DE +- es_ES +smtp_relay: smtp.gmail.com +smtp_relay_port: 587 +ssh_users: sd +v2_onion_services: false +v3_onion_services: true +''' + + +def setup_function(function): + global SD_DIR + SD_DIR = tempfile.mkdtemp() + ANSIBLE_BASE = '{0}/install_files/ansible-base'.format(SD_DIR) + cmd = 'mkdir -p {0}/group_vars/all'.format(ANSIBLE_BASE).split() + subprocess.check_call(cmd) + for name in ['sd_admin_test.pub', 'ca.crt', 'sd.crt', 'key.asc']: + subprocess.check_call('cp -r {0}/files/{1} {2}'.format(CURRENT_DIR, + name, ANSIBLE_BASE).split()) + for name in ['de_DE', 'es_ES', 'fr_FR', 'pt_BR']: + dircmd = 'mkdir -p 
{0}/securedrop/translations/{1}'.format( + SD_DIR, name) + subprocess.check_call(dircmd.split()) + + +def teardown_function(function): + subprocess.check_call('rm -rf {0}'.format(SD_DIR).split()) + + +def verify_username_prompt(child): + child.expect(b"Username for SSH access to the servers:") + + +def verify_reboot_prompt(child): + child.expect( + rb"Daily reboot time of the server \(24\-hour clock\):", timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '4' # noqa: E501 + + +def verify_ipv4_appserver_prompt(child): + child.expect(rb'Local IPv4 address for the Application Server\:', timeout=2) # noqa: E501 + # Expected default + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '10.20.2.2' # noqa: E501 + + +def verify_ipv4_monserver_prompt(child): + child.expect(rb'Local IPv4 address for the Monitor Server\:', timeout=2) + # Expected default + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '10.20.3.2' # noqa: E501 + + +def verify_hostname_app_prompt(child): + child.expect(rb'Hostname for Application Server\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'app' # noqa: E501 + + +def verify_hostname_mon_prompt(child): + child.expect(rb'Hostname for Monitor Server\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'mon' # noqa: E501 + + +def verify_dns_prompt(child): + child.expect(rb'DNS server specified during installation\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '8.8.8.8' # noqa: E501 + + +def verify_app_gpg_key_prompt(child): + child.expect(rb'Local filepath to public key for SecureDrop Application GPG public key\:', timeout=2) # noqa: E501 + + +def verify_https_prompt(child): + child.expect(rb'Whether HTTPS should be enabled on Source Interface \(requires EV cert\)\:', timeout=2) # noqa: E501 + + +def verify_https_cert_prompt(child): + child.expect(rb'Local filepath to HTTPS certificate\:', timeout=2) + + +def verify_https_cert_key_prompt(child): + child.expect(rb'Local filepath to HTTPS certificate key\:', timeout=2) + + +def verify_https_cert_chain_file_prompt(child): + child.expect(rb'Local filepath to HTTPS certificate chain file\:', timeout=2) # noqa: E501 + + +def verify_app_gpg_fingerprint_prompt(child): + child.expect(rb'Full fingerprint for the SecureDrop Application GPG Key\:', timeout=2) # noqa: E501 + + +def verify_ossec_gpg_key_prompt(child): + child.expect(rb'Local filepath to OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 + + +def verify_ossec_gpg_fingerprint_prompt(child): + child.expect(rb'Full fingerprint for the OSSEC alerts GPG public key\:', timeout=2) # noqa: E501 + + +def verify_admin_email_prompt(child): + child.expect(rb'Admin email address for receiving OSSEC alerts\:', timeout=2) # noqa: E501 + + +def verify_journalist_gpg_key_prompt(child): + child.expect(rb'Local filepath to journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 + + +def verify_journalist_fingerprint_prompt(child): + child.expect(rb'Full fingerprint for the journalist alerts GPG public key \(optional\)\:', timeout=2) # noqa: E501 + + +def verify_journalist_email_prompt(child): + child.expect(rb'Email address for receiving journalist alerts \(optional\)\:', timeout=2) # noqa: E501 + + +def verify_smtp_relay_prompt(child): + child.expect(rb'SMTP relay for sending OSSEC alerts\:', timeout=2) + # Expected default + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 
'smtp.gmail.com' # noqa: E501 + + +def verify_smtp_port_prompt(child): + child.expect(rb'SMTP port for sending OSSEC alerts\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '587' # noqa: E501 + + +def verify_sasl_domain_prompt(child): + child.expect(rb'SASL domain for sending OSSEC alerts\:', timeout=2) + # Expected default + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'gmail.com' # noqa: E501 + + +def verify_sasl_username_prompt(child): + child.expect(rb'SASL username for sending OSSEC alerts\:', timeout=2) + + +def verify_sasl_password_prompt(child): + child.expect(rb'SASL password for sending OSSEC alerts\:', timeout=2) + + +def verify_ssh_over_lan_prompt(child): + child.expect(rb'will be available over LAN only\:', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 + + +def verify_locales_prompt(child): + child.expect(rb'Space separated list of additional locales to support') # noqa: E501 + + +def verify_v2_onion_for_first_time(child): + child.expect(rb' installed before 1.0.0\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'no' # noqa: E501 + + +def verify_v3_onion_for_first_time(child): + child.expect(rb'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 + + +def verify_v3_onion_when_v2_is_enabled(child): + child.expect(rb'Do you want to enable v3 onion services \(recommended\)\?\:', timeout=2) # noqa: E501 + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 + + +def test_sdconfig_on_first_run(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + verify_journalist_gpg_key_prompt(child) + child.sendline('') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'no') + verify_v3_onion_for_first_time(child) + child.sendline('\b' * 4 + 'yes') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to 
occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert data == OUTPUT1 + + +def test_sdconfig_both_v2_v3_true(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + verify_journalist_gpg_key_prompt(child) + child.sendline('') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'yes') + verify_v3_onion_when_v2_is_enabled(child) + child.sendline('\b' * 3 + 'yes') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert data == WHEN_BOTH_TRUE + + +def test_sdconfig_only_v2_true(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + verify_journalist_gpg_key_prompt(child) + 
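+    # An empty response skips the optional journalist alert key, so the
+    # follow-up fingerprint and email prompts are never shown.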
child.sendline('') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'yes') + verify_v3_onion_when_v2_is_enabled(child) + child.sendline('\b' * 3 + 'no') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert data == WHEN_ONLY_V2 + + +def test_sdconfig_enable_journalist_alerts(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + child.sendline('') + verify_reboot_prompt(child) + child.sendline('\b5') # backspace and put 5 + verify_ipv4_appserver_prompt(child) + child.sendline('') + verify_ipv4_monserver_prompt(child) + child.sendline('') + verify_hostname_app_prompt(child) + child.sendline('') + verify_hostname_mon_prompt(child) + child.sendline('') + verify_dns_prompt(child) + child.sendline('') + verify_app_gpg_key_prompt(child) + child.sendline('\b' * 14 + 'sd_admin_test.pub') + verify_https_prompt(child) + # Default answer is no + child.sendline('') + verify_app_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_ossec_gpg_key_prompt(child) + child.sendline('\b' * 9 + 'sd_admin_test.pub') + verify_ossec_gpg_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_admin_email_prompt(child) + child.sendline('[email protected]') + # We will provide a key for this question + verify_journalist_gpg_key_prompt(child) + child.sendline('sd_admin_test.pub') + verify_journalist_fingerprint_prompt(child) + child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58') + verify_journalist_email_prompt(child) + child.sendline('[email protected]') + verify_smtp_relay_prompt(child) + child.sendline('') + verify_smtp_port_prompt(child) + child.sendline('') + verify_sasl_domain_prompt(child) + child.sendline('') + verify_sasl_username_prompt(child) + child.sendline('testuser') + verify_sasl_password_prompt(child) + child.sendline('testpassword') + verify_ssh_over_lan_prompt(child) + child.sendline('') + verify_locales_prompt(child) + child.sendline('de_DE es_ES') + verify_v2_onion_for_first_time(child) + child.sendline('\b' * 3 + 'no') + verify_v3_onion_for_first_time(child) + child.sendline('\b' * 4 + 'yes') + + child.expect(pexpect.EOF, timeout=10) # Wait for validation to occur + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501 + data = fobj.read() + assert JOURNALIST_ALERT_OUTPUT == data + + +def test_sdconfig_enable_https_on_source_interface(): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} sdconfig'.format(cmd, SD_DIR)) + verify_username_prompt(child) + 
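+    # An empty response accepts the default SSH username ('sd').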
child.sendline('')
+    verify_reboot_prompt(child)
+    child.sendline('\b5')  # backspace and put 5
+    verify_ipv4_appserver_prompt(child)
+    child.sendline('')
+    verify_ipv4_monserver_prompt(child)
+    child.sendline('')
+    verify_hostname_app_prompt(child)
+    child.sendline('')
+    verify_hostname_mon_prompt(child)
+    child.sendline('')
+    verify_dns_prompt(child)
+    child.sendline('')
+    verify_app_gpg_key_prompt(child)
+    child.sendline('\b' * 14 + 'sd_admin_test.pub')
+    verify_https_prompt(child)
+    # Default answer is no
+    # We will press backspace twice and type yes
+    child.sendline('\b\byes')
+    verify_https_cert_prompt(child)
+    child.sendline('sd.crt')
+    verify_https_cert_key_prompt(child)
+    child.sendline('key.asc')
+    verify_https_cert_chain_file_prompt(child)
+    child.sendline('ca.crt')
+    verify_app_gpg_fingerprint_prompt(child)
+    child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58')
+    verify_ossec_gpg_key_prompt(child)
+    child.sendline('\b' * 9 + 'sd_admin_test.pub')
+    verify_ossec_gpg_fingerprint_prompt(child)
+    child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58')
+    verify_admin_email_prompt(child)
+    child.sendline('[email protected]')
+    # We will provide a key for this question
+    verify_journalist_gpg_key_prompt(child)
+    child.sendline('sd_admin_test.pub')
+    verify_journalist_fingerprint_prompt(child)
+    child.sendline('1F544B31C845D698EB31F2FF364F1162D32E7E58')
+    verify_journalist_email_prompt(child)
+    child.sendline('[email protected]')
+    verify_smtp_relay_prompt(child)
+    child.sendline('')
+    verify_smtp_port_prompt(child)
+    child.sendline('')
+    verify_sasl_domain_prompt(child)
+    child.sendline('')
+    verify_sasl_username_prompt(child)
+    child.sendline('testuser')
+    verify_sasl_password_prompt(child)
+    child.sendline('testpassword')
+    verify_ssh_over_lan_prompt(child)
+    child.sendline('')
+    verify_locales_prompt(child)
+    child.sendline('de_DE es_ES')
+    verify_v2_onion_for_first_time(child)
+    child.sendline('\b' * 3 + 'no')
+    verify_v3_onion_for_first_time(child)
+    child.sendline('\b' * 4 + 'yes')
+
+    child.expect(pexpect.EOF, timeout=10)  # Wait for validation to occur
+    child.close()
+    assert child.exitstatus == 0
+    assert child.signalstatus is None
+
+    with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj:  # noqa: E501
+        data = fobj.read()
+    assert HTTPS_OUTPUT == data
+
+
+# The following is a minimal git configuration that can be used to fetch
+# from the SecureDrop GitHub repository. We use it because developers may
+# have git configured to fetch from [email protected]: instead of over
+# https, and the SSH remote requires authentication information.
+GIT_CONFIG = u'''[core]
+    repositoryformatversion = 0
+    filemode = true
+    bare = false
+    logallrefupdates = true
+[remote "origin"]
+    url = https://github.com/freedomofpress/securedrop.git
+    fetch = +refs/heads/*:refs/remotes/origin/*
+'''
+
+
+@pytest.fixture
+def securedrop_git_repo(tmpdir):
+    os.chdir(str(tmpdir))
+    # Clone the SecureDrop repository into the temp directory.
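+    # (The clone goes over the network, so these tests need connectivity.)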
+ cmd = ['git', 'clone', + 'https://github.com/freedomofpress/securedrop.git'] + subprocess.check_call(cmd) + os.chdir(os.path.join(str(tmpdir), 'securedrop/admin')) + subprocess.check_call('git reset --hard'.split()) + # Now we will put in our own git configuration + with io.open('../.git/config', 'w') as fobj: + fobj.write(GIT_CONFIG) + # Let us move to an older tag + subprocess.check_call('git checkout 0.6'.split()) + yield tmpdir + + # Save coverage information in same directory as unit test coverage + test_name = str(tmpdir).split('/')[-1] + try: + subprocess.check_call( + ['cp', '{}/securedrop/admin/.coverage'.format(str(tmpdir)), + '{}/../.coverage.{}'.format(CURRENT_DIR, test_name)]) + except subprocess.CalledProcessError: + # It means the coverage file may not exist, don't error + pass + + +def set_reliable_keyserver(gpgdir): + # If gpg.conf doesn't exist, create it and set a reliable default + # keyserver for the tests. + gpgconf_path = os.path.join(gpgdir, 'gpg.conf') + if not os.path.exists(gpgconf_path): + os.mkdir(gpgdir) + with open(gpgconf_path, 'a') as f: + f.write('keyserver hkps://keys.openpgp.org') + + # Ensure correct permissions on .gnupg home directory. + os.chmod(gpgdir, 0o0700) + + +@flaky(max_runs=3) +def test_check_for_update_when_updates_needed(securedrop_git_repo): + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + ansible_base = os.path.join(str(securedrop_git_repo), + 'securedrop/install_files/ansible-base') + fullcmd = 'coverage run {0} --root {1} check_for_updates'.format( + cmd, ansible_base) + child = pexpect.spawn(fullcmd) + child.expect(b'Update needed', timeout=20) + + child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + +@flaky(max_runs=3) +def test_check_for_update_when_updates_not_needed(securedrop_git_repo): + # Determine latest production tag using GitHub release object + github_url = 'https://api.github.com/repos/freedomofpress/securedrop/releases/latest' # noqa: E501 + latest_release = requests.get(github_url).json() + latest_tag = str(latest_release["tag_name"]) + + subprocess.check_call(["git", "checkout", latest_tag]) + + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + ansible_base = os.path.join(str(securedrop_git_repo), + 'securedrop/install_files/ansible-base') + fullcmd = 'coverage run {0} --root {1} check_for_updates'.format( + cmd, ansible_base) + child = pexpect.spawn(fullcmd) + child.expect(b'All updates applied', timeout=20) + + child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + +@flaky(max_runs=3) +def test_update(securedrop_git_repo): + gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg') + set_reliable_keyserver(gpgdir) + + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + ansible_base = os.path.join(str(securedrop_git_repo), + 'securedrop/install_files/ansible-base') + child = pexpect.spawn('coverage run {0} --root {1} update'.format( + cmd, ansible_base)) + + output = child.read() + assert b'Updated to SecureDrop' in output + assert b'Signature verification successful' in output + + child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit + child.close() + assert child.exitstatus == 0 + assert child.signalstatus is None + + +@flaky(max_runs=3) +def test_update_fails_when_no_signature_present(securedrop_git_repo): + gpgdir = 
os.path.join(os.path.expanduser('~'), '.gnupg') + set_reliable_keyserver(gpgdir) + + # First we make a very high version tag of SecureDrop so that the + # updater will try to update to it. Since the tag is unsigned, it + # should fail. + subprocess.check_call('git checkout develop'.split()) + subprocess.check_call('git tag 9999999.0.0'.split()) + + # Switch back to an older branch for the test + subprocess.check_call('git checkout 0.6'.split()) + + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + ansible_base = os.path.join(str(securedrop_git_repo), + 'securedrop/install_files/ansible-base') + child = pexpect.spawn('coverage run {0} --root {1} update'.format( + cmd, ansible_base)) + output = child.read() + assert b'Updated to SecureDrop' not in output + assert b'Signature verification failed' in output + + child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit + child.close() + + # Failures should eventually exit non-zero. + assert child.exitstatus != 0 + assert child.signalstatus != 0 + + +@flaky(max_runs=3) +def test_update_with_duplicate_branch_and_tag(securedrop_git_repo): + gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg') + set_reliable_keyserver(gpgdir) + + github_url = 'https://api.github.com/repos/freedomofpress/securedrop/releases/latest' # noqa: E501 + latest_release = requests.get(github_url).json() + latest_tag = str(latest_release["tag_name"]) + + # Create a branch with the same name as a tag. + subprocess.check_call(['git', 'checkout', '-b', latest_tag]) + # Checkout the older tag again in preparation for the update. + subprocess.check_call('git checkout 0.6'.split()) + + cmd = os.path.join(os.path.dirname(CURRENT_DIR), + 'securedrop_admin/__init__.py') + ansible_base = os.path.join(str(securedrop_git_repo), + 'securedrop/install_files/ansible-base') + + child = pexpect.spawn('coverage run {0} --root {1} update'.format( + cmd, ansible_base)) + output = child.read() + # Verify that we do not falsely check out a branch instead of a tag. + assert b'Switched to branch' not in output + assert b'Updated to SecureDrop' not in output + assert b'Signature verification failed' in output + + child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit + child.close() + assert child.exitstatus != 0 + assert child.signalstatus != 0 diff --git a/admin/tests/test_securedrop-admin-setup.py b/admin/tests/test_securedrop-admin-setup.py new file mode 100644 --- /dev/null +++ b/admin/tests/test_securedrop-admin-setup.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +# +# SecureDrop whistleblower submission system +# Copyright (C) 2017 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# + +import argparse +import mock +import os +import pytest +import subprocess + +import bootstrap + + +class TestSecureDropAdmin(object): + + def test_verbose(self, capsys): + bootstrap.setup_logger(verbose=True) + bootstrap.sdlog.debug('VISIBLE') + out, err = capsys.readouterr() + assert 'VISIBLE' in out + + def test_not_verbose(self, capsys): + bootstrap.setup_logger(verbose=False) + bootstrap.sdlog.debug('HIDDEN') + bootstrap.sdlog.info('VISIBLE') + out, err = capsys.readouterr() + assert 'HIDDEN' not in out + assert 'VISIBLE' in out + + def test_run_command(self): + for output_line in bootstrap.run_command( + ['/bin/echo', 'something']): + assert output_line.strip() == b'something' + + lines = [] + with pytest.raises(subprocess.CalledProcessError): + for output_line in bootstrap.run_command( + ['sh', '-c', + 'echo in stdout ; echo in stderr >&2 ; false']): + lines.append(output_line.strip()) + assert lines[0] == b'in stdout' + assert lines[1] == b'in stderr' + + def test_install_pip_dependencies_up_to_date(self, caplog): + args = argparse.Namespace() + bootstrap.install_pip_dependencies(args, ['/bin/echo']) + assert 'securedrop-admin are up-to-date' in caplog.text + + def test_install_pip_dependencies_upgraded(self, caplog): + args = argparse.Namespace() + bootstrap.install_pip_dependencies( + args, ['/bin/echo', 'Successfully installed']) + assert 'securedrop-admin upgraded' in caplog.text + + def test_install_pip_dependencies_fail(self, caplog): + args = argparse.Namespace() + with pytest.raises(subprocess.CalledProcessError): + bootstrap.install_pip_dependencies( + args, ['/bin/sh', '-c', + 'echo in stdout ; echo in stderr >&2 ; false']) + assert 'Failed to install' in caplog.text + assert 'in stdout' in caplog.text + assert 'in stderr' in caplog.text + + def test_python3_stretch_venv_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(str(tmpdir), 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value=b"buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert 'Tails 3 Python 3 virtualenv detected.' in caplog.text + assert 'Tails 3 Python 3 virtualenv deleted.' 
in caplog.text + assert not os.path.exists(venv_path) + + def test_python3_buster_venv_not_deleted_in_buster(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.7') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="buster"): + bootstrap.clean_up_tails3_venv(venv_path) + assert ( + 'Tails 3 Python 3 virtualenv detected' not in caplog.text + ) + assert os.path.exists(venv_path) + + def test_python3_stretch_venv_not_deleted_in_stretch(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', return_value="stretch"): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path) + + def test_venv_cleanup_subprocess_exception(self, tmpdir, caplog): + venv_path = str(tmpdir) + python_lib_path = os.path.join(venv_path, 'lib/python3.5') + os.makedirs(python_lib_path) + with mock.patch('bootstrap.is_tails', return_value=True): + with mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError(1, + ':o')): + bootstrap.clean_up_tails3_venv(venv_path) + assert os.path.exists(venv_path) + + def test_envsetup_cleanup(self, tmpdir, caplog): + venv = os.path.join(str(tmpdir), "empty_dir") + args = "" + with pytest.raises(subprocess.CalledProcessError): + with mock.patch('subprocess.check_output', + side_effect=self.side_effect_venv_bootstrap(venv)): + bootstrap.envsetup(args, venv) + assert not os.path.exists(venv) + assert 'Cleaning up virtualenv' in caplog.text + + def side_effect_venv_bootstrap(self, venv_path): + # emulate the venv being created, and raise exception to simulate + # failure in virtualenv creation + os.makedirs(venv_path) + raise subprocess.CalledProcessError(1, ':o') diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py new file mode 100644 --- /dev/null +++ b/admin/tests/test_securedrop-admin.py @@ -0,0 +1,1005 @@ +# -*- coding: utf-8 -*- +# +# SecureDrop whistleblower submission system +# Copyright (C) 2017 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# + +import io +import os +import argparse +from flaky import flaky +from os.path import dirname, join, basename, exists +import json +import mock +from prompt_toolkit.validation import ValidationError +import pytest +import subprocess +import textwrap +import yaml + +import securedrop_admin + + +class Document(object): + def __init__(self, text): + self.text = text + + +@flaky +class TestSecureDropAdmin(object): + + def test_verbose(self, capsys): + securedrop_admin.setup_logger(verbose=True) + securedrop_admin.sdlog.debug('VISIBLE') + out, err = capsys.readouterr() + assert 'VISIBLE' in out + + def test_not_verbose(self, capsys): + securedrop_admin.setup_logger(verbose=False) + securedrop_admin.sdlog.debug('HIDDEN') + securedrop_admin.sdlog.info('VISIBLE') + out, err = capsys.readouterr() + assert 'HIDDEN' not in out + assert 'VISIBLE' in out + + def test_check_for_updates_update_needed(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = b"0.6" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "Update needed" in caplog.text + assert update_status is True + assert tag == '0.6.1' + + def test_check_for_updates_higher_version(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = b"0.6" + tags_available = b"0.1\n0.10.0\n0.6.2\n0.6\n0.6-rc1\n0.9.0\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "Update needed" in caplog.text + assert update_status is True + assert tag == '0.10.0' + + def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog): + """Regression test for #3426""" + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = b"0.6.1\n" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "All updates applied" in caplog.text + assert update_status is False + assert tag == '0.6.1' + + def test_check_for_updates_update_not_needed(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = b"0.6.1" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "All updates applied" in caplog.text + assert update_status is False + assert tag == '0.6.1' + + def test_check_for_updates_if_most_recent_tag_is_rc(self, tmpdir, caplog): + """During pre-release QA, the most recent tag ends in *-rc. 
Let's + verify that users will not accidentally check out this tag.""" + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + current_tag = b"0.6.1" + tags_available = b"0.6\n0.6-rc1\n0.6.1\n0.6.1-rc1\n" + + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=[current_tag, tags_available]): + update_status, tag = securedrop_admin.check_for_updates(args) + assert "All updates applied" in caplog.text + assert update_status is False + assert tag == '0.6.1' + + def test_update_exits_if_not_needed(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(False, "0.6.1")): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code == 0 + + def test_get_release_key_from_valid_keyserver(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + with mock.patch('subprocess.check_call'): + # Check that no exception is raised when the process is fast + securedrop_admin.get_release_key_from_keyserver(args) + + # Check that use of the keyword arg also raises no exception + securedrop_admin.get_release_key_from_keyserver( + args, keyserver='test.com') + + @pytest.mark.parametrize("git_output", + [b'gpg: Signature made Tue 13 Mar ' + b'2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n', + + b'gpg: Signature made Thu 20 Jul ' + b'2017 08:12:25 PM EDT\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key ' + b'<[email protected]>"\n', + + b'gpg: Signature made Thu 20 Jul ' + b'2017 08:12:25 PM EDT\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n' + b'gpg: aka "SecureDrop Release ' + b'Signing Key ' + b'<[email protected]>" ' + b'[unknown]\n']) + def test_update_signature_verifies(self, tmpdir, caplog, git_output): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + patchers = [ + mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")), + mock.patch('subprocess.check_call'), + mock.patch('subprocess.check_output', + side_effect=[ + git_output, + subprocess.CalledProcessError(1, 'cmd', + b'not a valid ref')]), + ] + + for patcher in patchers: + patcher.start() + + try: + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification successful." 
in caplog.text + assert "Updated to SecureDrop" in caplog.text + assert ret_code == 0 + finally: + for patcher in patchers: + patcher.stop() + + def test_update_unexpected_exception_git_refs(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: Good signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n') + + patchers = [ + mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")), + mock.patch('subprocess.check_call'), + mock.patch('subprocess.check_output', + side_effect=[ + git_output, + subprocess.CalledProcessError(1, 'cmd', + b'a random error')]), + ] + + for patcher in patchers: + patcher.start() + + try: + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification successful." not in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code == 1 + finally: + for patcher in patchers: + patcher.stop() + + def test_update_signature_does_not_verify(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'22245C81E3BAEB4138B36061310F561200F4AD77\n' + b'gpg: BAD signature from "SecureDrop Release ' + b'Signing Key" [unknown]\n') + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")): + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + return_value=git_output): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification failed." in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code != 0 + + def test_update_malicious_key_named_fingerprint(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from "22245C81E3BAEB4138' + b'B36061310F561200F4AD77" [unknown]\n') + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")): + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + return_value=git_output): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification failed." in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code != 0 + + def test_update_malicious_key_named_good_sig(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from Good signature from ' + b'"SecureDrop Release Signing Key" [unknown]\n') + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")): + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + return_value=git_output): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification failed." 
in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code != 0 + + def test_update_malicious_key_named_good_sig_fingerprint(self, tmpdir, + caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + git_output = (b'gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n' + b'gpg: using RSA key ' + b'1234567812345678123456781234567812345678\n' + b'gpg: Good signature from 22245C81E3BAEB4138' + b'B36061310F561200F4AD77 Good signature from ' + b'"SecureDrop Release Signing Key" [unknown]\n') + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")): + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + return_value=git_output): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification failed." in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code != 0 + + def test_no_signature_on_update(self, tmpdir, caplog): + git_repo_path = str(tmpdir) + args = argparse.Namespace(root=git_repo_path) + + with mock.patch('securedrop_admin.check_for_updates', + return_value=(True, "0.6.1")): + with mock.patch('subprocess.check_call'): + with mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError( + 1, 'git', 'error: no signature found') + ): + ret_code = securedrop_admin.update(args) + assert "Applying SecureDrop updates..." in caplog.text + assert "Signature verification failed." in caplog.text + assert "Updated to SecureDrop" not in caplog.text + assert ret_code != 0 + + def test_exit_codes(self, tmpdir): + """Ensure that securedrop-admin returns the correct + exit codes for success or failure.""" + with mock.patch( + 'securedrop_admin.install_securedrop', + return_value=True): + with pytest.raises(SystemExit) as e: + securedrop_admin.main( + ['--root', str(tmpdir), 'install']) + assert e.value.code == securedrop_admin.EXIT_SUCCESS + + with mock.patch( + 'securedrop_admin.install_securedrop', + side_effect=subprocess.CalledProcessError(1, 'TestError')): + with pytest.raises(SystemExit) as e: + securedrop_admin.main( + ['--root', str(tmpdir), 'install']) + assert e.value.code == securedrop_admin.EXIT_SUBPROCESS_ERROR + + with mock.patch( + 'securedrop_admin.install_securedrop', + side_effect=KeyboardInterrupt): + with pytest.raises(SystemExit) as e: + securedrop_admin.main( + ['--root', str(tmpdir), 'install']) + assert e.value.code == securedrop_admin.EXIT_INTERRUPT + + +class TestSiteConfig(object): + + def test_exists(self): + args = argparse.Namespace(site_config='DOES_NOT_EXIST', + ansible_path='.', + app_path=dirname(__file__)) + assert not securedrop_admin.SiteConfig(args).exists() + args = argparse.Namespace(site_config=__file__, + ansible_path='.', + app_path=dirname(__file__)) + assert securedrop_admin.SiteConfig(args).exists() + + def test_validate_not_empty(self): + validator = securedrop_admin.SiteConfig.ValidateNotEmpty() + + assert validator.validate(Document('something')) + with pytest.raises(ValidationError): + validator.validate(Document('')) + + def test_validate_time(self): + validator = securedrop_admin.SiteConfig.ValidateTime() + + assert validator.validate(Document('4')) + with pytest.raises(ValidationError): + validator.validate(Document('')) + with pytest.raises(ValidationError): + validator.validate(Document('four')) + with pytest.raises(ValidationError): + validator.validate(Document('4.30')) + with 
pytest.raises(ValidationError): + validator.validate(Document('25')) + with pytest.raises(ValidationError): + validator.validate(Document('-4')) + + def test_validate_ossec_username(self): + validator = securedrop_admin.SiteConfig.ValidateOSSECUsername() + + assert validator.validate(Document('username')) + with pytest.raises(ValidationError): + validator.validate(Document('bad@user')) + with pytest.raises(ValidationError): + validator.validate(Document('test')) + + def test_validate_ossec_password(self): + validator = securedrop_admin.SiteConfig.ValidateOSSECPassword() + + assert validator.validate(Document('goodpassword')) + with pytest.raises(ValidationError): + validator.validate(Document('password123')) + with pytest.raises(ValidationError): + validator.validate(Document('')) + with pytest.raises(ValidationError): + validator.validate(Document('short')) + + def test_validate_email(self): + validator = securedrop_admin.SiteConfig.ValidateEmail() + + assert validator.validate(Document('[email protected]')) + with pytest.raises(ValidationError): + validator.validate(Document('badmail')) + with pytest.raises(ValidationError): + validator.validate(Document('')) + + def test_validate_ossec_email(self): + validator = securedrop_admin.SiteConfig.ValidateOSSECEmail() + + assert validator.validate(Document('[email protected]')) + with pytest.raises(ValidationError) as e: + validator.validate(Document('[email protected]')) + assert 'something other than [email protected]' in str(e) + + def test_validate_optional_email(self): + validator = securedrop_admin.SiteConfig.ValidateOptionalEmail() + + assert validator.validate(Document('[email protected]')) + assert validator.validate(Document('')) + + def test_validate_user(self): + validator = securedrop_admin.SiteConfig.ValidateUser() + with pytest.raises(ValidationError): + validator.validate(Document("amnesia")) + with pytest.raises(ValidationError): + validator.validate(Document("root")) + with pytest.raises(ValidationError): + validator.validate(Document("")) + assert validator.validate(Document("gooduser")) + + def test_validate_ip(self): + validator = securedrop_admin.SiteConfig.ValidateIP() + with pytest.raises(ValidationError): + validator.validate(Document("599.20")) + assert validator.validate(Document("192.168.1.1")) + + def test_validate_path(self): + mydir = dirname(__file__) + myfile = basename(__file__) + validator = securedrop_admin.SiteConfig.ValidatePath(mydir) + assert validator.validate(Document(myfile)) + with pytest.raises(ValidationError): + validator.validate(Document("NONEXIST")) + with pytest.raises(ValidationError): + validator.validate(Document("")) + + def test_validate_optional_path(self): + mydir = dirname(__file__) + myfile = basename(__file__) + validator = securedrop_admin.SiteConfig.ValidateOptionalPath(mydir) + assert validator.validate(Document(myfile)) + assert validator.validate(Document("")) + + def test_validate_yes_no(self): + validator = securedrop_admin.SiteConfig.ValidateYesNo() + with pytest.raises(ValidationError): + validator.validate(Document("something")) + assert validator.validate(Document("yes")) + assert validator.validate(Document("YES")) + assert validator.validate(Document("no")) + assert validator.validate(Document("NO")) + + def test_validate_fingerprint(self): + validator = securedrop_admin.SiteConfig.ValidateFingerprint() + assert validator.validate(Document( + "012345678901234567890123456789ABCDEFABCD")) + assert validator.validate(Document( + "01234 5678901234567890123456789ABCDE FABCD")) 
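+        # (Embedded spaces are accepted here; sanitize_fingerprint strips
+        # them before the value is stored.)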
+ + with pytest.raises(ValidationError) as e: + validator.validate(Document( + "65A1B5FF195B56353CC63DFFCC40EF1228271441")) + assert 'TEST journalist' in str(e) + + with pytest.raises(ValidationError) as e: + validator.validate(Document( + "600BC6D5142C68F35DDBCEA87B597104EDDDC102")) + assert 'TEST admin' in str(e) + + with pytest.raises(ValidationError) as e: + validator.validate(Document( + "0000")) + assert '40 hexadecimal' in str(e) + + with pytest.raises(ValidationError) as e: + validator.validate(Document( + "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz")) + assert '40 hexadecimal' in str(e) + + def test_validate_optional_fingerprint(self): + validator = securedrop_admin.SiteConfig.ValidateOptionalFingerprint() + assert validator.validate(Document( + "012345678901234567890123456789ABCDEFABCD")) + assert validator.validate(Document("")) + + def test_sanitize_fingerprint(self): + args = argparse.Namespace(site_config='DOES_NOT_EXIST', + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + assert "ABC" == site_config.sanitize_fingerprint(" A bc") + + def test_validate_int(self): + validator = securedrop_admin.SiteConfig.ValidateInt() + with pytest.raises(ValidationError): + validator.validate(Document("123X")) + assert validator.validate(Document("192")) + + def test_locales(self): + locales = securedrop_admin.SiteConfig.Locales(dirname(__file__)) + translations = locales.get_translations() + assert 'en_US' in translations + assert 'fr_FR' in translations + + def test_validate_locales(self): + validator = securedrop_admin.SiteConfig.ValidateLocales( + dirname(__file__)) + assert validator.validate(Document('en_US fr_FR ')) + with pytest.raises(ValidationError) as e: + validator.validate(Document('BAD')) + assert 'BAD' in str(e) + + def test_save(self, tmpdir): + site_config_path = join(str(tmpdir), 'site_config') + args = argparse.Namespace(site_config=site_config_path, + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.config = {'var1': u'val1', 'var2': u'val2'} + site_config.save() + expected = textwrap.dedent("""\ + var1: val1 + var2: val2 + """) + assert expected == io.open(site_config_path).read() + + def test_validate_gpg_key(self, caplog): + args = argparse.Namespace(site_config='INVALID', + ansible_path='tests/files', + app_path=dirname(__file__)) + good_config = { + 'securedrop_app_gpg_public_key': + 'test_journalist_key.pub', + + 'securedrop_app_gpg_fingerprint': + '65A1B5FF195B56353CC63DFFCC40EF1228271441', + + 'ossec_alert_gpg_public_key': + 'test_journalist_key.pub', + + 'ossec_gpg_fpr': + '65A1B5FF195B56353CC63DFFCC40EF1228271441', + + 'journalist_alert_gpg_public_key': + 'test_journalist_key.pub', + + 'journalist_gpg_fpr': + '65A1B5FF195B56353CC63DFFCC40EF1228271441', + } + site_config = securedrop_admin.SiteConfig(args) + site_config.config = good_config + assert site_config.validate_gpg_keys() + + for key in ('securedrop_app_gpg_fingerprint', + 'ossec_gpg_fpr', + 'journalist_gpg_fpr'): + bad_config = good_config.copy() + bad_config[key] = 'FAIL' + site_config.config = bad_config + with pytest.raises(securedrop_admin.FingerprintException) as e: + site_config.validate_gpg_keys() + assert 'FAIL does not match' in str(e) + + def test_journalist_alert_email(self): + args = argparse.Namespace(site_config='INVALID', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.config = { + 
'journalist_alert_gpg_public_key': + '', + + 'journalist_gpg_fpr': + '', + } + assert site_config.validate_journalist_alert_email() + site_config.config = { + 'journalist_alert_gpg_public_key': + 'test_journalist_key.pub', + + 'journalist_gpg_fpr': + '65A1B5FF195B56353CC63DFFCC40EF1228271441', + } + site_config.config['journalist_alert_email'] = '' + with pytest.raises( + securedrop_admin.JournalistAlertEmailException) as e: + site_config.validate_journalist_alert_email() + assert 'not be empty' in str(e) + + site_config.config['journalist_alert_email'] = 'bademail' + with pytest.raises( + securedrop_admin.JournalistAlertEmailException) as e: + site_config.validate_journalist_alert_email() + assert 'Must contain a @' in str(e) + + site_config.config['journalist_alert_email'] = '[email protected]' + assert site_config.validate_journalist_alert_email() + + @mock.patch('securedrop_admin.SiteConfig.validated_input', + side_effect=lambda p, d, v, t: d) + @mock.patch('securedrop_admin.SiteConfig.save') + def test_update_config(self, mock_save, mock_validate_input): + args = argparse.Namespace(site_config='tests/files/site-specific', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + + assert site_config.load_and_update_config() + assert 'user_defined_variable' in site_config.config + mock_save.assert_called_once() + mock_validate_input.assert_called() + + @mock.patch('securedrop_admin.SiteConfig.validated_input', + side_effect=lambda p, d, v, t: d) + @mock.patch('securedrop_admin.SiteConfig.validate_gpg_keys') + def test_update_config_no_site_specific( + self, + validate_gpg_keys, + mock_validate_input, + tmpdir): + site_config_path = join(str(tmpdir), 'site_config') + args = argparse.Namespace(site_config=site_config_path, + ansible_path='.', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + assert site_config.load_and_update_config() + mock_validate_input.assert_called() + validate_gpg_keys.assert_called_once() + assert exists(site_config_path) + + def test_load_and_update_config(self): + args = argparse.Namespace(site_config='tests/files/site-specific', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with mock.patch('securedrop_admin.SiteConfig.update_config'): + site_config.load_and_update_config() + assert site_config.config != {} + + args = argparse.Namespace( + site_config='tests/files/site-specific-missing-entries', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with mock.patch('securedrop_admin.SiteConfig.update_config'): + site_config.load_and_update_config() + assert site_config.config != {} + + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with mock.patch('securedrop_admin.SiteConfig.update_config'): + site_config.load_and_update_config() + assert site_config.config == {} + + def get_desc(self, site_config, var): + for desc in site_config.desc: + if desc[0] == var: + return desc + + def verify_desc_consistency_optional(self, site_config, desc): + (var, default, etype, prompt, validator, transform, condition) = desc + # verify the default passes validation + if callable(default): + default = default() + assert site_config.user_prompt_config_one(desc, None) == default + assert type(default) == etype + + def verify_desc_consistency(self, site_config, 
desc):
+        self.verify_desc_consistency_optional(site_config, desc)
+        (var, default, etype, prompt, validator, transform, condition) = desc
+        with pytest.raises(ValidationError):
+            site_config.user_prompt_config_one(desc, '')
+            # When testing v3_onion_services, the prompt falls back to a
+            # default of 'yes', so the call above does not raise. Raise
+            # ValidationError ourselves so this check behaves consistently
+            # with the other test cases.
+            if var == "v3_onion_services":
+                raise ValidationError()
+
+    def verify_prompt_boolean(
+            self, site_config, desc):
+        self.verify_desc_consistency(site_config, desc)
+        (var, default, etype, prompt, validator, transform, condition) = desc
+        assert site_config.user_prompt_config_one(desc, True) is True
+        assert site_config.user_prompt_config_one(desc, False) is False
+        assert site_config.user_prompt_config_one(desc, 'YES') is True
+        assert site_config.user_prompt_config_one(desc, 'NO') is False
+
+    def verify_prompt_boolean_for_v3(
+            self, site_config, desc):
+        """Since the expected answer for v3_onion_services depends on the
+        answer given for v2_onion_services, the expectations here differ.
+        """
+        self.verify_desc_consistency(site_config, desc)
+        (var, default, etype, prompt, validator, transform, condition) = desc
+        assert site_config.user_prompt_config_one(desc, True) is True
+        # If v2_onion_services is disabled, v3 is forced to True
+        assert site_config.user_prompt_config_one(desc, False) is True
+        assert site_config.user_prompt_config_one(desc, 'YES') is True
+        # If v2_onion_services is disabled, v3 is forced to True
+        assert site_config.user_prompt_config_one(desc, 'NO') is True
+
+        # Now set v2_onion_services to True so that v3_onion_services can
+        # be set to False. This is the case where an admin has explicitly
+        # marked v3 as False.
+        site_config._config_in_progress = {"v2_onion_services": True}
+        site_config.config = {"v3_onion_services": False}
+
+        # The next two assertions should pick up the default set above,
+        # i.e. they should return False.
+        assert site_config.user_prompt_config_one(desc, True) is False
+        assert site_config.user_prompt_config_one(desc, 'YES') is False
+
+        assert site_config.user_prompt_config_one(desc, False) is False
+        assert site_config.user_prompt_config_one(desc, 'NO') is False
+
+    def test_desc_conditional(self):
+        """Ensure that conditional prompts behave correctly.
+ + Prompts which depend on another question should only be + asked if the prior question was answered appropriately.""" + + questions = [ + ['first_question', + False, + bool, + u'Test Question 1', + None, + lambda x: x.lower() == 'yes', + lambda config: True], + ['dependent_question', + 'default_value', + str, + u'Test Question 2', + None, + None, + lambda config: config.get('first_question', False)] + ] + args = argparse.Namespace(site_config='tests/files/site-specific', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.desc = questions + + def auto_prompt(prompt, default, **kwargs): + return default + + with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt): + config = site_config.user_prompt_config() + assert config['dependent_question'] != 'default_value' + + site_config.desc[0][1] = True + + config = site_config.user_prompt_config() + assert config['dependent_question'] == 'default_value' + + verify_prompt_ssh_users = verify_desc_consistency + verify_prompt_app_ip = verify_desc_consistency + verify_prompt_monitor_ip = verify_desc_consistency + verify_prompt_app_hostname = verify_desc_consistency + verify_prompt_monitor_hostname = verify_desc_consistency + verify_prompt_dns_server = verify_desc_consistency + + verify_prompt_securedrop_app_https_on_source_interface = \ + verify_prompt_boolean + verify_prompt_enable_ssh_over_tor = verify_prompt_boolean + + verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency + verify_prompt_v2_onion_services = verify_prompt_boolean + verify_prompt_v3_onion_services = verify_prompt_boolean_for_v3 + + def verify_prompt_not_empty(self, site_config, desc): + with pytest.raises(ValidationError): + site_config.user_prompt_config_one(desc, '') + + def verify_prompt_fingerprint_optional(self, site_config, desc): + fpr = "0123456 789012 34567890123456789ABCDEFABCD" + clean_fpr = site_config.sanitize_fingerprint(fpr) + assert site_config.user_prompt_config_one(desc, fpr) == clean_fpr + + def verify_desc_consistency_allow_empty(self, site_config, desc): + (var, default, etype, prompt, validator, transform, condition) = desc + # verify the default passes validation + assert site_config.user_prompt_config_one(desc, None) == default + assert type(default) == etype + + def verify_prompt_fingerprint(self, site_config, desc): + self.verify_prompt_not_empty(site_config, desc) + self.verify_prompt_fingerprint_optional(site_config, desc) + + verify_prompt_securedrop_app_gpg_fingerprint = verify_prompt_fingerprint + verify_prompt_ossec_alert_gpg_public_key = verify_desc_consistency + verify_prompt_ossec_gpg_fpr = verify_prompt_fingerprint + verify_prompt_ossec_alert_email = verify_prompt_not_empty + verify_prompt_journalist_alert_gpg_public_key = ( + verify_desc_consistency_optional) + verify_prompt_journalist_gpg_fpr = verify_prompt_fingerprint_optional + verify_prompt_journalist_alert_email = verify_desc_consistency_optional + verify_prompt_securedrop_app_https_certificate_chain_src = ( + verify_desc_consistency_optional) + verify_prompt_securedrop_app_https_certificate_key_src = ( + verify_desc_consistency_optional) + verify_prompt_securedrop_app_https_certificate_cert_src = ( + verify_desc_consistency_optional) + verify_prompt_smtp_relay = verify_prompt_not_empty + verify_prompt_smtp_relay_port = verify_desc_consistency + verify_prompt_daily_reboot_time = verify_desc_consistency + verify_prompt_sasl_domain = verify_desc_consistency_allow_empty + 
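+    # SASL credentials, unlike the SASL domain, must not be left empty: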
verify_prompt_sasl_username = verify_prompt_not_empty + verify_prompt_sasl_password = verify_prompt_not_empty + + def verify_prompt_securedrop_supported_locales(self, site_config, desc): + (var, default, etype, prompt, validator, transform, condition) = desc + # verify the default passes validation + assert site_config.user_prompt_config_one(desc, None) == default + assert type(default) == etype + assert site_config.user_prompt_config_one( + desc, 'fr_FR en_US') == ['fr_FR', 'en_US'] + assert site_config.user_prompt_config_one( + desc, ['fr_FR', 'en_US']) == ['fr_FR', 'en_US'] + assert site_config.user_prompt_config_one(desc, '') == [] + with pytest.raises(ValidationError): + site_config.user_prompt_config_one(desc, 'wrong') + + def test_user_prompt_config_one(self): + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + + def auto_prompt(prompt, default, **kwargs): + if 'validator' in kwargs: + assert kwargs['validator'].validate(Document(default)) + return default + + with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt): + for desc in site_config.desc: + (var, default, etype, prompt, validator, transform, + condition) = desc + method = 'verify_prompt_' + var + print("checking " + method) + getattr(self, method)(site_config, desc) + + def test_validated_input(self): + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + + def auto_prompt(prompt, default, **kwargs): + return default + + with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt): + value = 'VALUE' + assert value == site_config.validated_input( + '', value, lambda: True, None) + assert value.lower() == site_config.validated_input( + '', value, lambda: True, str.lower) + assert 'yes' == site_config.validated_input( + '', True, lambda: True, None) + assert 'no' == site_config.validated_input( + '', False, lambda: True, None) + assert '1234' == site_config.validated_input( + '', 1234, lambda: True, None) + assert "a b" == site_config.validated_input( + '', ['a', 'b'], lambda: True, None) + assert "{}" == site_config.validated_input( + '', {}, lambda: True, None) + + def test_load(self, caplog): + args = argparse.Namespace(site_config='tests/files/site-specific', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + assert 'app_hostname' in site_config.load() + + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with pytest.raises(IOError) as e: + site_config.load() + assert 'No such file' in e.value.strerror + assert 'Config file missing' in caplog.text + + args = argparse.Namespace(site_config='tests/files/corrupted', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + with pytest.raises(yaml.YAMLError) as e: + site_config.load() + assert 'issue processing' in caplog.text + + +def test_generate_new_v3_keys(): + public, private = securedrop_admin.generate_new_v3_keys() + + for key in [public, private]: + # base32 padding characters should be removed + assert '=' not in key + assert len(key) == 52 + + +def test_find_or_generate_new_torv3_keys_first_run(tmpdir, capsys): + args = argparse.Namespace(ansible_path=str(tmpdir)) + + return_code = 
securedrop_admin.find_or_generate_new_torv3_keys(args) + + captured = capsys.readouterr() + assert 'Tor v3 onion service keys generated' in captured.out + assert return_code == 0 + + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + + with open(secret_key_path) as f: + v3_onion_service_keys = json.load(f) + + expected_keys = ['app_journalist_public_key', + 'app_journalist_private_key', + 'app_ssh_public_key', + 'app_ssh_private_key', + 'mon_ssh_public_key', + 'mon_ssh_private_key'] + for key in expected_keys: + assert key in v3_onion_service_keys.keys() + + +def test_find_or_generate_new_torv3_keys_subsequent_run(tmpdir, capsys): + args = argparse.Namespace(ansible_path=str(tmpdir)) + + secret_key_path = os.path.join(args.ansible_path, + "tor_v3_keys.json") + old_keys = {'foo': 'bar'} + with open(secret_key_path, 'w') as f: + json.dump(old_keys, f) + + return_code = securedrop_admin.find_or_generate_new_torv3_keys(args) + + captured = capsys.readouterr() + assert 'Tor v3 onion service keys already exist' in captured.out + assert return_code == 0 + + with open(secret_key_path) as f: + v3_onion_service_keys = json.load(f) + + assert v3_onion_service_keys == old_keys + + +def test_v3_and_https_cert_message(tmpdir, capsys): + args = argparse.Namespace(site_config='UNKNOWN', + ansible_path='tests/files', + app_path=dirname(__file__)) + site_config = securedrop_admin.SiteConfig(args) + site_config.config = {"v3_onion_services": False, + "securedrop_app_https_certificate_cert_src": "ab.crt"} # noqa: E501 + # This should return True as v3 is not setup + assert site_config.validate_https_and_v3() + + # This should return False as v3 and https are both setup + site_config.config.update({"v3_onion_services": True}) + assert not site_config.validate_https_and_v3() + + # This should return True as https is not setup + site_config.config.update({"securedrop_app_https_certificate_cert_src": ""}) # noqa: E501 + assert site_config.validate_https_and_v3() diff --git a/install_files/securedrop-app-code/DEBIAN/templates b/admin/tests/translations/fr_FR similarity index 100% rename from install_files/securedrop-app-code/DEBIAN/templates rename to admin/tests/translations/fr_FR diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst --- a/docs/development/testing_application_tests.rst +++ b/docs/development/testing_application_tests.rst @@ -32,25 +32,28 @@ and app-staging VMs, based on the contents of ``securedrop/requirements/test-requirements.txt``. If you wish to change the dependencies, see :ref:`updating_pip_dependencies`. -Running the application tests +Running the Application Tests ----------------------------- The tests can be run inside the development VM: .. code:: sh - vagrant ssh development - cd /vagrant/securedrop - pytest -v tests + make test Or the app-staging VM: .. code:: sh vagrant ssh app-staging - sudo su www-data -s /bin/bash + sudo bash cd /var/www/securedrop pytest -v tests + chown -R www-data /var/lib/securedrop /var/www/securedrop + +.. warning:: The ``chown`` is necessary because running the tests as + root will change ownership of some files, creating + problems with the source and journalist interfaces. For explanation of the difference between these machines, see :doc:`virtual_environments`. @@ -59,18 +62,15 @@ If you just want to run the functional tests, you can use: .. 
code:: sh - pytest -v tests/functional/ + securedrop/bin/dev-shell bin/run-test -v tests/functional Similarly, if you want to run a single test, you can specify it through the file, class, and test name: .. code:: sh - pytest tests/test_journalist.py::TestJournalistApp::test_invalid_credentials - -Some Selenium tests are decorated to produce before and after screenshots to aid -in debugging. This behavior is enabled with the ``SCREENSHOTS_ENABLED`` environment -variable. Output PNG files will be placed in the ``tests/log/`` directory. + securedrop/bin/dev-shell bin/run-test \ + tests/test_journalist.py::TestJournalistApp::test_invalid_credentials The `gnupg <https://pythonhosted.org/python-gnupg>`_ library can be quite verbose in its @@ -79,10 +79,6 @@ be controlled via the ``GNUPG_LOG_LEVEL`` environment variable. It can have valu such as ``INFO`` or ``DEBUG`` if some particular test case or test run needs greater verbosity. -.. code:: sh - - SCREENSHOTS_ENABLED=1 pytest tests/functional/ - Page Layout Tests ~~~~~~~~~~~~~~~~~ @@ -96,10 +92,10 @@ option: .. code:: sh - pytest tests/ --page-layout + securedrop/bin/dev-shell bin/run-test --page-layout tests -Updating the application tests +Updating the Application Tests ------------------------------ Unit tests are stored in the ``securedrop/tests/`` directory and functional @@ -114,7 +110,7 @@ tests are stored in the functional test directory:: ├── utils │ ├── db_helper.py │ ├── env.py - │ └── async.py + │ └── asynchronous.py ├── test_journalist.py ├── test_source.py │ ... diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst --- a/docs/development/testing_configuration_tests.rst +++ b/docs/development/testing_configuration_tests.rst @@ -3,7 +3,7 @@ Testing: Configuration Tests ============================ -Testinfra_ tests verify the end state of the Vagrant machines. Any +Testinfra_ tests verify the end state of the staging VMs. Any changes to the Ansible configuration should have a corresponding spectest. @@ -14,45 +14,53 @@ Installation .. code:: sh - pip install -r securedrop/requirements/develop-requirements.txt + pip install --no-deps --require-hashes -r securedrop/requirements/python3/develop-requirements.txt -Running the config tests + +Running the Config Tests ------------------------ In order to run the tests, first create and provision the VM you intend -to test. For the development VM: +to test. + +For the staging VMs: .. code:: sh - vagrant up development + make build-debs + make staging -For the staging VMs: +The VMs will be set up using either the libvirt or virtualbox Vagrant VM provider, +depending on your system settings. You'll need to use the appropriate commands below +based on your choice of provider. + +Then, to run the tests: + +libvirt: +~~~~~~~~ .. code:: sh - make build-debs - vagrant up /staging/ + molecule verify -s libvirt-staging-xenial -Running all VMs concurrently may cause performance -problems if you have less than 8GB of RAM. You can isolate specific -machines for faster testing: +virtualbox: +~~~~~~~~~~~ .. code:: sh - ./testinfra/test.py development - ./testinfra/test.py app-staging - ./testinfra/test.py mon-staging + molecule verify -s virtualbox-staging-xenial -.. note:: The config tests for the ``app-prod`` and ``mon-prod`` hosts are - incomplete. Further changes are necessary to run the tests via - SSH over Authenticated Tor Hidden Service (ATHS), for both local - testing via Vagrant and automated testing via CI. +.. 
tip:: To run only a single test, set ``PYTEST_ADDOPTS="-k name_of_test"``
+         in your environment.
 
 Test failure against any host will generate a report with informative output
-about the specific test that triggered the error. The wrapper script
+about the specific test that triggered the error. Molecule
 will also exit with a non-zero status code.
 
-Updating the config tests
+.. note:: To build and test the VMs with one command, use the Molecule ``test``
+          action: ``molecule test -s libvirt-staging-xenial --destroy=never``, or ``molecule test -s virtualbox-staging-xenial --destroy=never``.
+
+Updating the Config Tests
 -------------------------
 
 Changes to the Ansible config should result in failing config tests, but
@@ -61,20 +69,19 @@ sure to add a corresponding spectest to validate that state after a
 new provisioning run. Tests import variables from separate YAML files
 than the Ansible playbooks: ::
 
-    testinfra/vars/
+    molecule/testinfra/staging/vars/
     ├── app-prod.yml
     ├── app-staging.yml
-    ├── build.yml
-    ├── development.yml
     ├── mon-prod.yml
-    └── mon-staging.yml
+    ├── mon-staging.yml
+    └── staging.yml
 
 Any variable changes in the Ansible config should have a corresponding
 entry in these vars files. These vars are dynamically loaded for each
-host via the ``testinfra/conftest.py`` file. Make sure to add your tests to
-relevant location for the host you plan to test: ::
+host via the ``molecule/testinfra/staging/conftest.py`` file. Make sure to add
+your tests to the relevant location for the host you plan to test: ::
 
-    testinfra/app/
+    molecule/testinfra/staging/app/
     ├── apache
     │   ├── test_apache_journalist_interface.py
     │   ├── test_apache_service.py
@@ -86,31 +93,31 @@ relevant location for the host you plan to test: ::
     └── test_ossec.py
 
 In the example above, to add a new test for the ``app-staging`` host,
-add a new file to the ``testinfra/spec/app-staging`` directory.
+add a new file to the ``molecule/testinfra/staging/app`` directory.
 
 .. tip:: Read :ref:`updating_ossec_rules` to learn how to write tests
          for the OSSEC rules.
 
-Config test layout
+Config Test Layout
 ------------------
 
-The config tests are mostly broken up according to machines in the
-Vagrantfile: ::
+With some exceptions, the config tests are broken up according to platform definitions in the
+Molecule configuration: ::
 
-    testinfra/
+    molecule/testinfra/staging
     ├── app
     ├── app-code
-    ├── build
     ├── common
-    ├── development
-    └── mon
+    ├── mon
+    ├── ossec
+    └── vars
 
 Ideally the config tests would be broken up according to roles,
 mirroring the Ansible configuration. Prior to the reorganization of
 the Ansible layout, the tests are rather tightly coupled to hosts. The
 layout of config tests is therefore subject to change.
 
-Config testing strategy
+Config Testing Strategy
 -----------------------
 
 The config tests currently emphasize testing implementation rather than
diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst
--- a/docs/development/testing_continuous_integration.rst
+++ b/docs/development/testing_continuous_integration.rst
@@ -3,44 +3,51 @@ Testing: CI
 ===========
 
-The SecureDrop project uses CircleCI_ for running automated test suites on code changes:
+The SecureDrop project uses CircleCI_ for running automated test suites on code changes.
 
 .. 
_CircleCI: http://circleci.com/gh/freedomofpress/securedrop/
 
-CI test layout
---------------
-
-The relevant files for configuring the CI tests are: ::
-
-    ├── .circleci <--- folder contains config for CircleCI
-    ├── devops
-    │   ├── inventory <-- environment specific inventory
-    │   ├── playbooks <-- playbooks to start CI boxes
-    │   ├── scripts <-- shell wrapper scripts
-    │   ├── templates <-- contains templates for ansible tasks
-    │   └── vars <-- environment specific variables
-    └── Makefile <-- defines make task shortcuts
-
-The files under ``devops/`` are used to create a minimized staging environment
-on AWS EC2. The CircleCI host is used as the Ansible controller to provision
-the machines and run the :ref:`config_tests` against them.
-
-Running the CI staging environment
+The relevant files for configuring the CI tests are the ``Makefile`` in
+the main repo, the configuration file at ``.circleci/config.yml``, and
+the scripts in ``devops/``. You may want to consult the
+`CircleCI Configuration Reference <https://circleci.com/docs/2.0/configuration-reference/>`__
+to interpret the configuration file. Review the ``workflows`` section of the
+configuration file to understand which jobs are run by CircleCI.
+
+The files under ``devops/`` are used to create a libvirt-compatible environment on GCE.
+The GCE host is used as the Ansible controller, mimicking a developer's laptop,
+to provision the machines and run the :ref:`tests <config_tests>` against them.
+
+.. note:: We skip unnecessary jobs, such as the staging run, for pull requests that only
+          affect the documentation; to do so, we check whether the branch name begins with
+          ``docs-``. These checks are enforced in different parts of the configuration,
+          mainly within the ``Makefile``.
+
+.. warning:: In CI, we rebase branches in PRs on HEAD of the target branch.
+             This rebase does not occur for branches that are not in PRs.
+             When a branch is pushed to the shared ``freedomofpress`` remote, CI will run,
+             a rebase will not occur, and since opening a
+             `PR does not trigger a re-build <https://discuss.circleci.com/t/pull-requests-not-triggering-build/1213>`_,
+             the CI build results are not shown rebased on the latest of the target branch.
+             Keep this in mind if your branch is behind the target branch.
+             Once your branch is in a PR, you can rebuild, push an additional
+             commit, or manually rebase your branch to update the CI results.
+
+Running the CI Staging Environment
 ----------------------------------
 
-The staging environment tests will run automatically in CircleCI,
-when changes are submitted by Freedom of the Press Foundation staff
-(i.e. members of the ``freedomofpress`` GitHub organization).
+The staging environment tests will run automatically in CircleCI, when
+changes are submitted by Freedom of the Press Foundation staff (i.e. members
+of the ``freedomofpress`` GitHub organization). The tests also perform
+basic linting and validation, like checking for formatting errors in the
+Sphinx documentation.
 
-It also performs basic linting and validation, e.g. checking for mistakes in
-the Sphinx documentation.
+.. tip:: You will need a Google Cloud Platform account to proceed.
+         See the `Google Cloud Platform Getting Started Guide`_ for detailed instructions.
 
-.. tip:: You will need an Amazon Web Services EC2 account to proceed.
-         See the `AWS Getting Started Guide`_ for detailed instructions.
+.. _Google Cloud Platform Getting Started Guide: https://cloud.google.com/getting-started/
 
-.. 
_AWS Getting Started Guide: https://aws.amazon.com/ec2/getting-started/ - -In addition to an EC2 account, you will need a working `Docker installation`_ in +In addition to a GCP account, you will need a working `Docker installation`_ in order to run the container that builds the deb packages. You can verify that your Docker installation is working by running @@ -55,24 +62,25 @@ output as shown below: This message shows that your installation appears to be working correctly. ... -.. _Docker installation: https://www.docker.com/community-edition#/download +.. _Docker installation: https://docs.docker.com/install/ -Setup environment parameters +Setup Environment Parameters ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Source the setup script using the following command: .. code:: sh - source ./devops/scripts/local-setup.sh + source ./devops/gce-nested/ci-env.sh You will be prompted for the values of the required environment variables. There -are some defaults set that you may want to change. You will need to determine -the value of your VPC ID to use which is outside the scope of this guide. - +are some defaults set that you may want to change. You will need to export +``GOOGLE_CREDENTIALS`` with authentication details for your GCP account, +which is outside the scope of this guide. -Use Makefile to provision hosts +Use Makefile to Provision Hosts ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Run ``make help`` to see the full list of CI commands in the Makefile: .. code:: sh @@ -80,28 +88,17 @@ Run ``make help`` to see the full list of CI commands in the Makefile: $ make help Makefile for developing and testing SecureDrop. Subcommands: - docs: Build project documentation in live reload for editing. - docs-lint: Check documentation for common syntax errors. - ci-spinup: Creates AWS EC2 hosts for testing staging environment. - ci-teardown: Destroy AWS EC2 hosts for testing staging environment. - ci-run: Provisions AWS EC2 hosts for testing staging environment. - ci-test: Tests AWS EC2 hosts for testing staging environment. - ci-go: Creates, provisions, tests, and destroys AWS EC2 hosts - for testing staging environment. - ci-debug: Prevents automatic destruction of AWS EC2 hosts on error. + ci-go Creates, provisions, tests, and destroys GCE host for testing staging environment. + ci-go-xenial Creates, provisions, tests, and destroys GCE host for testing staging environment under xenial. + ci-lint Runs linting in linting container. + ci-teardown Destroys GCE host for testing staging environment. To run the tests locally: .. code:: sh - make ci-debug # hosts will not be destroyed automatically make ci-go -You can use ``make ci-run`` to provision the remote hosts while making changes, -including rebuilding the Debian packages used in the Staging environment. -See :doc:`virtual_environments` for more information. - -Note that if you typed ``make ci-debug`` above, you will have to manually remove -a blank file in ``${HOME}/.FPF_CI_DEBUG`` and then run ``make ci-teardown`` to -bring down the CI environment. Otherwise, specifically for AWS, you will be -charged hourly charges until those machines are terminated. +You can use ``./devops/gce-nested/ci-runner.sh`` to provision the remote hosts +while making changes, including rebuilding the Debian packages used in the +Staging environment. See :doc:`virtual_environments` for more information. diff --git a/docs/development/upgrade_testing.rst b/docs/development/upgrade_testing.rst new file mode 100644 --- /dev/null +++ b/docs/development/upgrade_testing.rst @@ -0,0 +1,133 @@ +.. 
_upgrade_testing:
+
+Upgrade Testing using Molecule
+==============================
+
+The SecureDrop project includes Molecule scenarios for developing and testing against
+multi-server configurations, including a scenario to simulate the process of upgrading an
+existing system. This document explains how to work with this scenario to test
+features that make potentially release-breaking changes such as database
+schema updates.
+
+The Molecule upgrade scenario sets up a predefined staging SecureDrop virtual
+environment using Vagrant boxes built with the latest application release.
+It also creates a virtualized APT repository, and modifies
+the SecureDrop environment to use this APT repository instead of the FPF main
+repo at https://apt.freedom.press/.
+
+You can use this scenario to test the upgrade process, using either
+locally-built .debs or packages from the FPF test repo at
+https://apt-test.freedom.press/. Both options are described below.
+
+.. note:: The upgrade scenario uses QEMU/KVM via Vagrant's libvirt provider, in
+          place of the default VirtualBox provider. If you haven't already done so,
+          you'll need to set up the libvirt provider before proceeding. For
+          more information, see :ref:`libvirt_provider`.
+
+.. _upgrade_testing_local:
+
+Upgrade testing using locally-built packages
+--------------------------------------------
+
+.. note::
+    As of ``0.12.1``, the default platform for upgrade testing
+    boxes is Ubuntu Xenial 16.04. We no longer support upgrade boxes
+    based on Ubuntu Trusty 14.04.
+
+First, build the app code packages and create the environment:
+
+.. code:: sh
+
+    make build-debs
+    make upgrade-start
+
+The playbook will return the source interface Onion address. You can use this to
+check the application version displayed in the source interface footer.
+Alternatively, you can log into the *Application Server* VM and check the deployed
+package version directly:
+
+.. code:: sh
+
+    molecule login -s upgrade -h app-staging
+
+From the *Application Server*:
+
+.. code:: sh
+
+    apt-cache policy securedrop-config
+
+The installed package version should match the latest release version.
+
+To perform an upgrade using the virtualized APT repository, log out of the
+*Application Server* and run the Molecule side-effect action:
+
+.. code:: sh
+
+    make upgrade-test-local
+
+This will upgrade the SecureDrop packages on the *Application* and
+*Monitor Servers*, using your locally-built packages and apt VM instead of the
+FPF production apt repository.
+
+You can verify that the application version has changed either by checking the
+source interface's footer or directly on the *Application Server* as described
+above.
+
+.. _upgrade_testing_apt:
+
+Upgrade testing using apt-test.freedom.press
+--------------------------------------------
+
+You can use the upgrade scenario to test upgrades using official release
+candidate packages from the FPF test APT repository. First,
+create the environment:
+
+.. code:: sh
+
+    make upgrade-start-qa
+
+Then, log into the *Application Server*:
+
+.. code:: sh
+
+    molecule login -s upgrade -h app-staging
+
+From the *Application Server*:
+
+.. code:: sh
+
+    sudo apt-get update
+    apt-cache policy securedrop-config
+
+The installed package version should match the current release version.
+To install the latest packages from the apt-test proxy:
+
+.. code:: sh
+
+    make upgrade-test-qa
+
+Log back into the *Application Server*, and repeat the previous commands:
+
+.. 
code:: sh
+
+    sudo apt-get update
+    apt-cache policy securedrop-config
+
+Navigate to the Source Interface URL again, and confirm you see the upgraded
+version in the footer. Then proceed with testing the new version.
+
+Updating the base boxes used for upgrade testing
+------------------------------------------------
+
+When a new version of SecureDrop is released, we must create and upload
+new VM images, to enable testing against that base version in future upgrade
+testing. The procedure is as follows:
+
+1. ``make clean`` to remove any previous artifacts (which would also be pushed)
+2. ``git checkout <version>`` (if a point release, ``git checkout develop``)
+3. ``make vagrant-package``
+4. ``cd molecule/vagrant-packager && ./push.yml`` to upload to S3
+5. Commit the local changes to JSON files and open a PR.
+
+Subsequent invocations of ``make upgrade-start`` will pull the latest
+version of the box.
diff --git a/docs/images/printer_setup_guide/bad_test_page.png b/docs/images/printer_setup_guide/bad_test_page.png
deleted file mode 100644
Binary files a/docs/images/printer_setup_guide/bad_test_page.png and /dev/null differ
diff --git a/docs/images/printer_setup_guide/good_test_page.png b/docs/images/printer_setup_guide/good_test_page.png
deleted file mode 100644
Binary files a/docs/images/printer_setup_guide/good_test_page.png and /dev/null differ
diff --git a/docs/images/printer_setup_guide/print_test_page.png b/docs/images/printer_setup_guide/print_test_page.png
deleted file mode 100644
Binary files a/docs/images/printer_setup_guide/print_test_page.png and /dev/null differ
diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
--- a/docs/test_the_installation.rst
+++ b/docs/test_the_installation.rst
@@ -1,14 +1,16 @@
 Test the Installation
 =====================
 
-Test connectivity
+Test Connectivity
 -----------------
 
-SSH to both servers over Tor
+SSH to Both Servers Over Tor
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-On the *Admin Workstation*, you should be able to SSH to the App
-Server and the *Monitor Server*. ::
+Assuming you haven't disabled SSH over Tor, SSH access will be
+restricted to the Tor network.
+
+On the *Admin Workstation*, you should be able to SSH to the *Application Server* and the *Monitor Server*. ::
 
     ssh app
     ssh mon
@@ -20,28 +22,21 @@ try using the verbose command format to troubleshoot: ::
 
     ssh <username>@<app .onion>
     ssh <username>@<mon .onion>
 
-.. tip:: You can find the Onion URLs for SSH in ``app-ssh-aths`` and
-         ``mon-ssh-aths`` inside the ``install_files/ansible-base`` directory.
+.. tip:: If your instance uses v2 onion services, you can find the Onion
+         URLs for SSH in ``app-ssh-aths`` and ``mon-ssh-aths`` inside the
+         ``install_files/ansible-base`` directory. If your instance uses v3
+         onion services, check the ``app-ssh.auth_private`` and
+         ``mon-ssh.auth_private`` files instead.
 
-Log in to both servers via TTY
+Log in to Both Servers via TTY
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 All access to the SecureDrop servers should be performed over SSH from the
-*Admin Workstation*. To aid in troubleshooting, physical logins via TTY are
-supported, but require 2FA to be configured. See the :doc:`2FA setup guide
-<servers_2fa>` for information how to enable console logins.
-
-Test the 2FA functionality by connecting a keyboard and display to each server,
-then login with the Admin username. You will need:
-
-* sudo passphrase for the Admin username
-* TOTP code from a 2FA app such as FreeOTP
+*Admin Workstation*. 
To aid in troubleshooting, login via a physical keyboard +attached to the server is also supported. -Confirm that logging in via TTY prompts for a 2FA code, and that the code -generated by your smartphone app permits logging in to an interactive shell. - -Sanity-check the install ------------------------- +Sanity-Check the Installation +----------------------------- On each server: @@ -57,28 +52,27 @@ On each server: On the *Application Server*: #. Check the AppArmor status with ``sudo aa-status``. On a production - instance all profiles should be in enforce mode. + instance all profiles should be in ``enforce`` mode. -Test the web interfaces +Test the Web Interfaces ----------------------- -#. Make sure the Source Interface is available, and that you can make a +#. Make sure the *Source Interface* is available, and that you can make a submission. - - Do this by opening the Tor Browser and navigating to the onion - URL from ``app-source-ths``. Proceed through the codename - generation (copy this down somewhere) and you can submit a - message or attach any random unimportant file. + - Open the *Source Interface* in the Tor Browser by clicking on its desktop + shortcut. Proceed through the codename + generation (copy this down somewhere) and submit a + test message or file. - Usage of the Source Interface is covered by our :doc:`Source User Manual <source>`. -#. Test that you can access the Journalist Interface, and that you can log +#. Test that you can access the *Journalist Interface*, and that you can log in as the admin user you just created. - - Open the Tor Browser and navigate to the onion URL from - app-journalist-aths. Enter your passphrase and two-factor - authentication code to log in. - - If you have problems logging in to the Admin/Journalist Interface, + - Open the *Journalist Interface* in the Tor Browser by clicking on its desktop + shortcut. Enter your passphrase and two-factor code to log in. + - If you have problems logging in to the *Admin/Journalist Interface*, SSH to the *Application Server* and restart the ntp daemon to synchronize the time: ``sudo service ntp restart``. Also check that your smartphone's time is accurate and set to network time in its @@ -88,22 +82,19 @@ Test the web interfaces - While logged in as an admin, you can send a reply to the test source submission you made earlier. - - Usage of the Journalist Interface is covered by our :doc:`Journalist + - Usage of the *Journalist Interface* is covered by our :doc:`Journalist User Manual <journalist>`. #. Test that the source received the reply. - - Within Tor Browser, navigate back to the app-source-ths URL and + - Within Tor Browser, navigate back to the *Source Interface* and use your previous test source codename to log in (or reload the page if it's still open) and check that the reply you just made is present. -#. We highly recommend that you create persistent bookmarks for the - Source and Journalist Interface addresses within Tor Browser. - #. Remove the test submissions you made prior to putting SecureDrop to - real use. On the main Journalist Interface page, select all sources and - click 'Delete selected'. + real use. On the main *Journalist Interface* page, select all sources and + click **Delete selected**. Once you've tested the installation and verified that everything is working, see :doc:`How to Use SecureDrop <journalist>`. 
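+
+As an optional extra check from the *Admin Workstation*, you can also fetch the
+*Source Interface* over Tor from the command line. The snippet below is a
+minimal sketch rather than an official test step: it assumes a v2 onion
+address saved in ``install_files/ansible-base/app-source-ths`` and a local
+Tor SOCKS proxy listening on the default port 9050.
+
+.. code:: sh
+
+    # Read the onion address saved by the install playbooks (v2 instances).
+    onion_url=$(cat install_files/ansible-base/app-source-ths)
+
+    # Fetch the page through the local Tor SOCKS proxy and print the
+    # HTTP status code; a healthy Source Interface should return 200.
+    curl -s --socks5-hostname 127.0.0.1:9050 -o /dev/null \
+        -w "%{http_code}\n" "$onion_url"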
diff --git a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml deleted file mode 100644 --- a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml +++ /dev/null @@ -1,43 +0,0 @@ ---- -- name: Install pip dependencies for running the unit and functional tests. - pip: - requirements: "{{ test_pip_requirements }}" - tags: - - pip - -- name: Install testing package dependencies. - apt: - name: "{{ item }}" - state: present - with_items: "{{ test_apt_dependencies }}" - tags: - - apt - -- name: Copy xvfb init script. - copy: - src: xvfb - dest: /etc/init.d/xvfb - owner: root - mode: "0700" - tags: - - xvfb - - permissions - -- name: Update rc.d to run xvfb at boot. - command: update-rc.d xvfb defaults - register: xvfb_setup - changed_when: "'System start/stop links for /etc/init.d/xvfb already exist' not in xvfb_setup.stdout" - notify: start xvfb - tags: - - xvfb - -- name: Set DISPLAY environment variable for xvfb. - copy: - src: xvfb_display.sh - dest: /etc/profile.d/xvfb_display.sh - owner: root - mode: "0444" - tags: - - xvfb - - environment - - permissions diff --git a/install_files/ansible-base/roles/app-test/tasks/extract_apptor_test_config.yml b/install_files/ansible-base/roles/app-test/tasks/extract_apptor_test_config.yml new file mode 100644 --- /dev/null +++ b/install_files/ansible-base/roles/app-test/tasks/extract_apptor_test_config.yml @@ -0,0 +1,38 @@ +--- + +- name: Create ansible fact directory + file: + path: /etc/ansible/facts.d + recurse: yes + state: directory + owner: root + group: root + +- name: Copy over tor application fact file + copy: + src: tor_app.fact + dest: /etc/ansible/facts.d/tor_app.fact + mode: 0755 + +- name: Refresh remote "local facts" for glory + setup: + filter: ansible_local + +- name: Gather apptest facts to dict to prepare for output + set_fact: + _tbb_selenium_dict: + hidserv_token: "{{ ansible_local.tor_app.hidserv_token }}" + journalist_location: "{{ ansible_local.tor_app.journalist_location }}" + source_location: "{{ ansible_local.tor_app.source_location }}" + timeout: "{{ tbb_timeout }}" + user: + name: "{{ tbb_selenium_user }}" + password: "{{ tbb_selenium_password }}" + secret: "{{ tbb_selenium_totp_secret }}" + +- name: Dump facts to local json for in-take + copy: + content: "{{ _tbb_selenium_dict | to_nice_json }}" + dest: "{{ tbb_funcfolder }}" + become: no + delegate_to: localhost diff --git a/install_files/ansible-base/roles/ossec/files/test_admin_key.pub b/install_files/ansible-base/roles/ossec/files/test_admin_key.pub Binary files a/install_files/ansible-base/roles/ossec/files/test_admin_key.pub and b/install_files/ansible-base/roles/ossec/files/test_admin_key.pub differ diff --git a/install_files/ansible-base/roles/ossec/files/test_admin_key.sec b/install_files/ansible-base/roles/ossec/files/test_admin_key.sec Binary files a/install_files/ansible-base/roles/ossec/files/test_admin_key.sec and b/install_files/ansible-base/roles/ossec/files/test_admin_key.sec differ diff --git a/securedrop/tests/pages-layout/__init__.py b/install_files/securedrop-app-code/debian/templates similarity index 100% rename from securedrop/tests/pages-layout/__init__.py rename to install_files/securedrop-app-code/debian/templates diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py new file mode 100644 --- /dev/null +++ b/journalist_gui/test_gui.py @@ -0,0 +1,263 @@ +import unittest 
+import subprocess +import pexpect +import pytest +from unittest import mock +from unittest.mock import MagicMock +from PyQt5.QtCore import Qt +from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog) +from PyQt5.QtTest import QTest + +from journalist_gui.SecureDropUpdater import UpdaterApp, strings, FLAG_LOCATION +from journalist_gui.SecureDropUpdater import prevent_second_instance + + [email protected]('journalist_gui.SecureDropUpdater.sys.exit') [email protected]('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') +class TestSecondInstancePrevention(unittest.TestCase): + def setUp(self): + self.mock_app = mock.MagicMock() + self.mock_app.applicationName = mock.MagicMock(return_value='sd') + + @staticmethod + def socket_mock_generator(already_bound_errno=98): + namespace = set() + + def kernel_bind(addr): + if addr in namespace: + error = OSError() + error.errno = already_bound_errno + raise error + else: + namespace.add(addr) + + socket_mock = mock.MagicMock() + socket_mock.socket().bind = mock.MagicMock(side_effect=kernel_bind) + return socket_mock + + def test_diff_name(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator() + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name2') + + mock_exit.assert_not_called() + + def test_same_name(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator() + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name1') + + mock_exit.assert_any_call() + + def test_unknown_kernel_error(self, mock_msgbox, mock_exit): + mock_socket = self.socket_mock_generator(131) # crazy unexpected error + with mock.patch('journalist_gui.SecureDropUpdater.socket', new=mock_socket): + with pytest.raises(OSError): + prevent_second_instance(self.mock_app, 'name1') + prevent_second_instance(self.mock_app, 'name1') + + +class AppTestCase(unittest.TestCase): + def setUp(self): + qApp = QApplication.instance() + if qApp is None: + self.app = QApplication(['']) + else: + self.app = qApp + + +class WindowTestCase(AppTestCase): + def setUp(self): + super(WindowTestCase, self).setUp() + self.window = UpdaterApp() + self.window.show() + QTest.qWaitForWindowExposed(self.window) + + def test_window_is_a_fixed_size(self): + # Verify the size policy is fixed + expected_sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed) + assert self.window.sizePolicy() == expected_sizePolicy + + # Verify the maximum and minimum sizes are the same as the current size + current_size = self.window.size() + assert self.window.minimumSize() == current_size + assert self.window.maximumSize() == current_size + + def test_clicking_install_later_exits_the_application(self): + QTest.mouseClick(self.window.pushButton, Qt.LeftButton) + self.assertFalse(self.window.isVisible()) + + def test_progress_bar_begins_at_zero(self): + self.assertEqual(self.window.progressBar.value(), 0) + + def test_default_tab(self): + self.assertEqual(self.window.tabWidget.currentIndex(), 0) + + def test_output_tab(self): + + tab = self.window.tabWidget.tabBar() + QTest.mouseClick(tab, Qt.LeftButton) + self.assertEqual(self.window.tabWidget.currentIndex(), + self.window.tabWidget.indexOf(self.window.tab_2)) + + @mock.patch('subprocess.check_output', + return_value=b'Python dependencies for securedrop-admin') + def 
test_setupThread(self, check_output): + with mock.patch.object(self.window, "call_tailsconfig", + return_value=MagicMock()): + with mock.patch('builtins.open') as mock_open: + self.window.setup_thread.run() # Call run directly + + mock_open.assert_called_once_with(FLAG_LOCATION, 'a') + self.assertEqual(self.window.update_success, True) + self.assertEqual(self.window.progressBar.value(), 70) + + @mock.patch('subprocess.check_output', + return_value=b'Failed to install pip dependencies') + def test_setupThread_failure(self, check_output): + with mock.patch.object(self.window, "call_tailsconfig", + return_value=MagicMock()): + with mock.patch('builtins.open') as mock_open: + self.window.setup_thread.run() # Call run directly + + mock_open.assert_called_once_with(FLAG_LOCATION, 'a') + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.progressBar.value(), 0) + self.assertEqual(self.window.failure_reason, + strings.update_failed_generic_reason) + + @mock.patch('subprocess.check_output', + return_value=b'Signature verification successful') + def test_updateThread(self, check_output): + with mock.patch.object(self.window, "setup_thread", + return_value=MagicMock()): + self.window.update_thread.run() # Call run directly + self.assertEqual(self.window.update_success, True) + self.assertEqual(self.window.progressBar.value(), 50) + + @mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError( + 1, 'cmd', b'Signature verification failed')) + def test_updateThread_failure(self, check_output): + with mock.patch.object(self.window, "setup_thread", + return_value=MagicMock()): + self.window.update_thread.run() # Call run directly + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.update_failed_sig_failure) + + @mock.patch('subprocess.check_output', + side_effect=subprocess.CalledProcessError( + 1, 'cmd', b'Generic other failure')) + def test_updateThread_generic_failure(self, check_output): + with mock.patch.object(self.window, "setup_thread", + return_value=MagicMock()): + self.window.update_thread.run() # Call run directly + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.update_failed_generic_reason) + + def test_get_sudo_password_when_password_provided(self): + expected_password = "password" + + with mock.patch.object(QInputDialog, 'getText', + return_value=[expected_password, True]): + sudo_password = self.window.get_sudo_password() + + self.assertEqual(sudo_password, expected_password) + + def test_get_sudo_password_when_password_not_provided(self): + test_password = "" + + with mock.patch.object(QInputDialog, 'getText', + return_value=[test_password, False]): + self.assertIsNone(self.window.get_sudo_password()) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_no_failures(self, pt): + child = pt() + before = MagicMock() + + before.decode.side_effect = ["SUDO: ", "Update successful. 
failed=0"] + child.before = before + child.exitstatus = 0 + with mock.patch('os.remove') as mock_remove: + self.window.tails_thread.run() + + mock_remove.assert_called_once_with(FLAG_LOCATION) + self.assertIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, True) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_generic_failure(self, pt): + child = pt() + before = MagicMock() + before.decode.side_effect = ["SUDO: ", "failed=10 ERROR!!!!!"] + child.before = before + self.window.tails_thread.run() + self.assertNotIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.tailsconfig_failed_generic_reason) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_sudo_password_is_wrong(self, pt): + child = pt() + before = MagicMock() + before.decode.side_effect = ["some data", + pexpect.exceptions.TIMEOUT(1)] + child.before = before + self.window.tails_thread.run() + self.assertNotIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.tailsconfig_failed_sudo_password) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_some_other_subprocess_error(self, pt): + child = pt() + before = MagicMock() + before.decode.side_effect = subprocess.CalledProcessError( + 1, 'cmd', b'Generic other failure') + child.before = before + self.window.tails_thread.run() + self.assertNotIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.tailsconfig_failed_generic_reason) + + def test_tails_status_success(self): + result = {'status': True, "output": "successful.", + 'failure_reason': ''} + + with mock.patch('os.remove') as mock_remove: + self.window.tails_status(result) + + # We do remove the flag file if the update does finish + mock_remove.assert_called_once_with(FLAG_LOCATION) + self.assertEqual(self.window.progressBar.value(), 100) + + def test_tails_status_failure(self): + result = {'status': False, "output": "successful.", + 'failure_reason': '42'} + + with mock.patch('os.remove') as mock_remove: + self.window.tails_status(result) + + # We do not remove the flag file if the update does not finish + mock_remove.assert_not_called() + self.assertEqual(self.window.progressBar.value(), 0) + + @mock.patch('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') + def test_no_update_without_password(self, mock_msgbox): + with mock.patch('journalist_gui.SecureDropUpdater.password_is_set', + return_value=False): + self.window.update_securedrop() + self.assertEqual(self.window.pushButton.isEnabled(), True) + self.assertEqual(self.window.pushButton_2.isEnabled(), False) + + +if __name__ == '__main__': + unittest.main() diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_play_configuration.py similarity index 81% rename from molecule/ansible-config/tests/test_max_fail_percentage.py rename to molecule/ansible-config/tests/test_play_configuration.py --- a/molecule/ansible-config/tests/test_max_fail_percentage.py +++ b/molecule/ansible-config/tests/test_play_configuration.py @@ -1,5 +1,5 @@ import os - +import io import pytest import yaml @@ -28,8 +28,8 @@ def find_ansible_playbooks(): for f in os.listdir(ANSIBLE_BASE): # Assume all YAML files in directory are playbooks. if f.endswith(".yml"): - # Ignore deprecated production vars file. 
- if f != "prod-specific.yml": + # Ignore files that are deprecated or require test exceptions + if f not in ["prod-specific.yml", "build-deb-pkgs.yml"]: playbooks.append(os.path.join(ANSIBLE_BASE, f)) # Sanity checking to make sure list of playbooks is not empty. assert len(playbooks) > 0 @@ -55,7 +55,7 @@ def test_max_fail_percentage(host, playbook): the parameter, but we'll play it safe and require it everywhere, to avoid mistakes down the road. """ - with open(playbook, 'r') as f: + with io.open(playbook, 'r') as f: playbook_yaml = yaml.safe_load(f) # Descend into playbook list structure to validate play attributes. for play in playbook_yaml: @@ -71,10 +71,23 @@ def test_any_errors_fatal(host, playbook): to "0", doing so ensures that any errors will cause an immediate failure on the playbook. """ - with open(playbook, 'r') as f: + with io.open(playbook, 'r') as f: playbook_yaml = yaml.safe_load(f) # Descend into playbook list structure to validate play attributes. for play in playbook_yaml: assert 'any_errors_fatal' in play # Ansible coerces booleans, so bare assert is sufficient assert play['any_errors_fatal'] + + [email protected]('playbook', find_ansible_playbooks()) +def test_locale(host, playbook): + """ + The securedrop-prod and securedrop-staging playbooks should + control the locale in the host environment by setting LC_ALL=C. + """ + with io.open(os.path.join(ANSIBLE_BASE, playbook), 'r') as f: + playbook_yaml = yaml.safe_load(f) + for play in playbook_yaml: + assert 'environment' in play + assert play['environment']['LC_ALL'] == 'C' diff --git a/molecule/aws/tests/test_tor_interfaces.py b/molecule/aws/tests/test_tor_interfaces.py deleted file mode 100644 --- a/molecule/aws/tests/test_tor_interfaces.py +++ /dev/null @@ -1,33 +0,0 @@ -import os -import re -import pytest - -TOR_URL_FILES = [{'file': 'app-source-ths', - 'check_string': 'SUBMIT DOCUMENTS', - 'error_string': "ERROR"}] - -testinfra_hosts = ["docker://apptestclient"] - [email protected]('site', TOR_URL_FILES) -def test_www(host, site): - """ - Ensure tor interface is reachable and returns expected content. - """ - - # Extract Onion URL from saved onion file, fetched back from app-staging. - onion_url_filepath = os.path.join( - os.path.dirname(__file__), - "../../../install_files/ansible-base/{}".format(site['file']) - ) - onion_url_raw = open(onion_url_filepath, 'ro').read() - onion_url = re.search("\w+\.onion", onion_url_raw).group() - - # Fetch Onion URL via curl to confirm interface is rendered correctly. - curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( - onion_url) - curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) - - site_scrape = host.check_output(curl_tor) - assert host.check_output(curl_tor_status) == "200" - assert site['check_string'] in site_scrape - assert site['error_string'] not in site_scrape diff --git a/molecule/aws/tor_apt_test.yml b/molecule/aws/tor_apt_test.yml deleted file mode 100644 --- a/molecule/aws/tor_apt_test.yml +++ /dev/null @@ -1,39 +0,0 @@ ---- -- name: Add apt SD test public key - apt_key: - data: "{{ lookup('file','securedrop_test.pub') }}" - state: present - -- name: Temporary fix for GH issue 2938 - file: - state: absent - path: "/etc/apt/sources.list.d/tor_apt_freedom_press.list" - -- name: Switch apt repo URLs to staging. 
-  replace:
-    dest: "/etc/apt/sources.list.d/tor.apt.freedom.press.list"
-    replace: "tor-apt-test.freedom.press"
-    regexp: '//tor-apt\.freedom\.press'
-  ignore_errors: "yes"
-  notify: update tor
-
-- name: Force possible tor update
-  meta: flush_handlers
-
-- name: Squash testinfra failure for packages needing update
-  apt:
-    upgrade: safe
-
-- name: Extract latest tor version
-  shell: |
-    apt-cache policy tor | sed -e 's/^\s*Installed:\ \(\S*\)/\1/g;tx;d;:x'
-  changed_when: false
-  register: extract_tor_version
-
-- name: Dump Tor version to file (for reporting)
-  copy:
-    dest: "{{ playbook_dir }}/../../.tor_version"
-    content: "{{ extract_tor_version.stdout }}"
-  delegate_to: localhost
-  run_once: true
-  become: "no"
diff --git a/molecule/builder-xenial/tests/conftest.py b/molecule/builder-xenial/tests/conftest.py
new file mode 100644
--- /dev/null
+++ b/molecule/builder-xenial/tests/conftest.py
@@ -0,0 +1,22 @@
+"""
+Import variables from vars.yml and inject into pytest namespace
+"""
+
+import os
+import io
+import yaml
+
+
+def pytest_namespace():
+    """ Return dict of vars imported as 'securedrop_test_vars' into pytest
+        global namespace
+    """
+    filepath = os.path.join(os.path.dirname(__file__), "vars.yml")
+    with io.open(filepath, 'r') as f:
+        securedrop_test_vars = yaml.safe_load(f)
+
+    # Tack on target OS for use in tests
+    securedrop_target_platform = os.environ.get("SECUREDROP_TARGET_PLATFORM")
+    securedrop_test_vars["securedrop_target_platform"] = securedrop_target_platform
+    # Wrapping the return value to accommodate for pytest namespacing
+    return dict(securedrop_test_vars=securedrop_test_vars)
diff --git a/molecule/builder-xenial/tests/test_build_dependencies.py b/molecule/builder-xenial/tests/test_build_dependencies.py
new file mode 100644
--- /dev/null
+++ b/molecule/builder-xenial/tests/test_build_dependencies.py
@@ -0,0 +1,40 @@
+import pytest
+import os
+
+
+SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM")
+testinfra_hosts = [
+    "docker://{}-sd-app".format(SECUREDROP_TARGET_PLATFORM)
+]
+
+
+def test_sass_gem_installed(host):
+    """
+    Ensure the `sass` Ruby gem is installed, for compiling SASS to CSS.
+    """
+    c = host.run("gem list")
+    assert "sass (3.4.23)" in c.stdout
+    assert c.rc == 0
+
+
+def test_pip_dependencies_installed(host):
+    """
+    Ensure the development pip dependencies are installed
+    """
+    c = host.run("pip3 list installed")
+    assert "Flask-Babel" in c.stdout
+    assert c.rc == 0
+
+
[email protected](reason="This check conflicts with the concept of pegging "
+                   "dependencies")
+def test_build_all_packages_updated(host):
+    """
+    Ensure a dist-upgrade has already been run, by checking that no
+    packages are eligible for upgrade currently. This will ensure that
+    all upgrades, security and otherwise, have been applied to the VM
+    used to build packages.
+    """
+    c = host.run('aptitude --simulate -y dist-upgrade')
+    assert c.rc == 0
+    assert "No packages will be installed, upgraded, or removed." 
in c.stdout
diff --git a/molecule/builder/tests/test_legacy_paths.py b/molecule/builder-xenial/tests/test_legacy_paths.py
similarity index 85%
rename from molecule/builder/tests/test_legacy_paths.py
rename to molecule/builder-xenial/tests/test_legacy_paths.py
--- a/molecule/builder/tests/test_legacy_paths.py
+++ b/molecule/builder-xenial/tests/test_legacy_paths.py
@@ -9,7 +9,7 @@
     '/tmp/build-securedrop-ossec-agent',
     '/tmp/build-securedrop-ossec-server',
 ])
-def test_build_ossec_apt_dependencies(File, build_path):
+def test_build_ossec_apt_dependencies(host, build_path):
     """
     Ensure that unwanted build paths are absent. Most of these were created
     as unwanted side-effects during CI-related changes to the build scripts.
@@ -17,4 +17,4 @@ def test_build_ossec_apt_dependencies(File, build_path):
     All paths are rightly considered "legacy" and should never be present
     on the build host. This test is strictly for guarding against regressions.
     """
-    assert not File(build_path).exists
+    assert not host.file(build_path).exists
diff --git a/molecule/builder-xenial/tests/test_securedrop_deb_package.py b/molecule/builder-xenial/tests/test_securedrop_deb_package.py
new file mode 100644
--- /dev/null
+++ b/molecule/builder-xenial/tests/test_securedrop_deb_package.py
@@ -0,0 +1,488 @@
+import pytest
+import os
+import re
+import tempfile
+
+
+SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM")
+testinfra_hosts = [
+    "docker://{}-sd-dpkg-verification".format(SECUREDROP_TARGET_PLATFORM)
+]
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
+def extract_package_name_from_filepath(filepath):
+    """
+    Helper function to infer intended package name from
+    the absolute filepath, using a rather garish regex.
+    E.g., given:
+        securedrop-ossec-agent-2.8.2+0.3.10-amd64.deb
+
+    returns:
+
+        securedrop-ossec-agent
+
+    which can then be used for comparisons in dpkg output.
+    """
+    deb_basename = os.path.basename(filepath)
+    package_name = re.search(r'^([a-z\-]+(?!\d))', deb_basename).groups()[0]
+    assert deb_basename.startswith(package_name)
+    return package_name
+
+
+def get_deb_packages():
+    """
+    Helper function to retrieve module-namespace test vars and format
+    the strings to interpolate version info. Keeps the test vars DRY
+    in terms of version info, and required since we can't rely on
+    Jinja-based evaluation of the YAML files (so we can't trivially
+    reuse vars in other var values, as is the case with Ansible).
+    """
+    substitutions = dict(
+        securedrop_version=securedrop_test_vars.securedrop_version,
+        ossec_version=securedrop_test_vars.ossec_version,
+        keyring_version=securedrop_test_vars.keyring_version,
+        config_version=securedrop_test_vars.config_version,
+        grsec_version=securedrop_test_vars.grsec_version,
+        securedrop_target_platform=securedrop_test_vars.securedrop_target_platform,
+    )
+
+    deb_packages = [d.format(**substitutions) for d
+                    in securedrop_test_vars.build_deb_packages]
+    return deb_packages
+
+
+deb_packages = get_deb_packages()
+
+
+def get_deb_tags():
+    """
+    Helper function to build array of package and tag tuples
+    for lintian.
+    """
+    deb_tags = []
+
+    for deb in get_deb_packages():
+        for tag in securedrop_test_vars.lintian_tags:
+            deb_tags.append((deb, tag))
+
+    return deb_tags
+
+
+deb_tags = get_deb_tags()
+
+
[email protected]("deb", deb_packages)
+def test_build_deb_packages(host, deb):
+    """
+    Sanity check the built Debian packages for Control field
+    values and general package structure. 
+    """
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+    assert deb_package.is_file
+
+
[email protected]("deb", deb_packages)
+def test_deb_packages_appear_installable(host, deb):
+    """
+    Confirms that a dry-run of installation reports no errors.
+    Simple check for valid Debian package structure, but not thorough.
+    When run on a malformed package, `dpkg` will report:
+
+        dpkg-deb: error: `foo.deb' is not a debian format archive
+
+    Testing application behavior is left to the functional tests.
+    """
+
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+
+    deb_basename = os.path.basename(deb_package.path)
+    package_name = extract_package_name_from_filepath(deb_package.path)
+    assert deb_basename.startswith(package_name)
+
+    # sudo is required to call `dpkg --install`, even as dry-run.
+    with host.sudo():
+        c = host.run("dpkg --install --dry-run {}".format(deb_package.path))
+        assert "Selecting previously unselected package {}".format(
+            package_name) in c.stdout
+        regex = "Preparing to unpack [./]+{} ...".format(
+            re.escape(deb_basename))
+        assert re.search(regex, c.stdout, re.M)
+        assert c.rc == 0
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_control_fields(host, deb):
+    """
+    Ensure Debian Control fields are populated as expected in the package.
+    These checks are rather superficial, and don't actually confirm that the
+    .deb files are not broken. At a later date, consider integration tests
+    that actually use these built files during an Ansible provisioning run.
+    """
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+    package_name = extract_package_name_from_filepath(deb_package.path)
+    # The `--field` option will display all fields if none are specified.
+    c = host.run("dpkg-deb --field {}".format(deb_package.path))
+
+    assert "Maintainer: SecureDrop Team <[email protected]>" in c.stdout
+    # The securedrop-config package is architecture independent
+    if package_name == "securedrop-config":
+        assert "Architecture: all" in c.stdout
+    else:
+        assert "Architecture: amd64" in c.stdout
+
+    assert "Package: {}".format(package_name) in c.stdout
+    assert c.rc == 0
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_control_fields_homepage(host, deb):
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+    # The `--field` option will display all fields if none are specified.
+    c = host.run("dpkg-deb --field {}".format(deb_package.path))
+    # The OSSEC source packages will have a different homepage;
+    # all other packages should set securedrop.org as homepage.
+    if os.path.basename(deb_package.path).startswith('ossec-'):
+        assert "Homepage: http://ossec.net" in c.stdout
+    else:
+        assert "Homepage: https://securedrop.org" in c.stdout
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_config_file(host, deb):
+    """
+    Ensures the `securedrop-app-code` package does not ship a `config.py`
+    file. Doing so would clobber the site-specific changes made via Ansible.
+
+    Somewhat lazily checking all deb packages, rather than just the app-code
+    package, but it accomplishes the same in a DRY manner. 
+ """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + assert not re.search(r"^ ./var/www/securedrop/config.py$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_deb_package_contains_pot_file(host, deb): + """ + Ensures the `securedrop-app-code` package has the + messages.pot file + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + assert re.search("^.*messages.pot$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_deb_package_contains_mo_file(host, deb): + """ + Ensures the `securedrop-app-code` package has at least one + compiled mo file. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + assert re.search(r"^.*messages\.mo$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_deb_package_contains_no_generated_assets(host, deb): + """ + Ensures the `securedrop-app-code` package does not ship minified + static assets, which are built automatically via Flask-Assets, and + may be present in the source directory used to build from. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # static/gen/ directory should exist + assert re.search(r"^.*\./var/www/securedrop" + "/static/gen/$", c.stdout, re.M) + # static/gen/ directory should be empty + assert not re.search(r"^.*\./var/www/securedrop" + "/static/gen/.+$", c.stdout, re.M) + + # static/.webassets-cache/ directory should exist + assert re.search(r"^.*\./var/www/securedrop" + "/static/.webassets-cache/$", c.stdout, re.M) + # static/.webassets-cache/ directory should be empty + assert not re.search(r"^.*\./var/www/securedrop" + "/static/.webassets-cache/.+$", c.stdout, re.M) + + # no SASS files should exist; only the generated CSS files. + assert not re.search("^.*sass$", c.stdout, re.M) + + # no .map files should exist; only the generated CSS files. + assert not re.search("^.*css.map$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_deb_package_contains_expected_conffiles(host, deb): + """ + Ensures the `securedrop-app-code` package declares only whitelisted + `conffiles`. Several files in `/etc/` would automatically be marked + conffiles, which would break unattended updates to critical package + functionality such as AppArmor profiles. This test validates overrides + in the build logic to unset those conffiles. 
+ """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + # For the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + tmpdir = tempfile.mkdtemp() + # The `--raw-extract` flag includes `DEBIAN/` dir with control files + host.run("dpkg-deb --raw-extract {} {}".format(deb, tmpdir)) + conffiles_path = os.path.join(tmpdir, "DEBIAN", "conffiles") + f = host.file(conffiles_path) + + assert f.is_file + # Ensure that the entirety of the file lists only the logo as conffile; + # effectively ensures e.g. AppArmor profiles are not conffiles. + conffiles = f.content_string.rstrip() + assert conffiles == "/var/www/securedrop/static/i/logo.png" + + # For the securedrop-config package, we want to ensure there are no + # conffiles so securedrop_additions.sh is squashed every time + if "securedrop-config" in deb_package.path: + c = host.run("dpkg-deb -I {}".format(deb)) + assert "conffiles" not in c.stdout + + [email protected]("deb", deb_packages) +def test_deb_package_contains_css(host, deb): + """ + Ensures the `securedrop-app-code` package contains files that + are generated during the `sass` build process. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + + for css_type in ['journalist', 'source']: + assert re.search(r"^.*\./var/www/securedrop/static/" + "css/{}.css$".format(css_type), c.stdout, re.M) + + [email protected]("deb, tag", deb_tags) +def test_deb_package_lintian(host, deb, tag): + """ + Ensures lintian likes our Debian packages. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + c = host.run("lintian --tags {} --no-tag-display-limit {}".format( + tag, deb_package.path)) + assert len(c.stdout) == 0 + + [email protected]("deb", deb_packages) +def test_deb_app_package_contains_https_validate_dir(host, deb): + """ + Ensures the `securedrop-app-code` package ships with a validation + '.well-known/pki-validation' directory + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # well-known/pki-validation directory should exist + assert re.search(r"^.*\./var/www/securedrop/" + ".well-known/pki-validation/$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_grsec_metapackage(host, deb): + """ + Sanity checks on the securedrop-grsec metapackage. Mostly checks + for presence of PaX flags hook and sysctl settings. + Does not validate file contents, just presence. + """ + + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + if "securedrop-grsec" in deb_package.path: + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # Custom sysctl options should be present + assert re.search(r"^.*\./etc/sysctl.d/30-securedrop.conf$", + c.stdout, re.M) + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # Post-install kernel hook for managing PaX flags must exist. 
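# --- editor's sketch (annotation; not part of the patch) ---
# The assertion just below, like most contents checks in this file, reduces to
# one pattern: list the archive with `dpkg-deb --contents` (tar-style output,
# one path per line) and match an anchored regex against it. A hypothetical
# generic helper making that pattern explicit:
import re
import subprocess

def package_contains(deb_path, path_regex):
    listing = subprocess.check_output(["dpkg-deb", "--contents", deb_path],
                                      text=True)
    return re.search(path_regex, listing, re.M) is not None

# e.g. package_contains("securedrop-grsec.deb", r"\./etc/sysctl\.d/30-securedrop\.conf$")
# --- end sketch ---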
+ assert re.search(r"^.*\./etc/kernel/postinst.d/paxctl-grub$", + c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_control_helper_files_are_present(host, deb): + """ + Inspect the package info to get a list of helper scripts + that should be shipped with the package, e.g. postinst, prerm, etc. + Necessary due to package build logic retooling. + + Example output from package info, for reference: + + $ dpkg-deb --info securedrop-app-code_0.12.0~rc1_amd64.deb + new debian package, version 2.0. + size 13583186 bytes: control archive=11713 bytes. + 62 bytes, 2 lines conffiles + 657 bytes, 10 lines control + 26076 bytes, 298 lines md5sums + 5503 bytes, 159 lines * postinst #!/bin/bash + + Note that the actual output will have trailing whitespace, removed + from this text description to satisfy linters. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + # Only relevant for the securedrop-app-code package: + if "securedrop-app-code" in deb_package.path: + wanted_files = [ + "conffiles", + "config", + "control", + "postinst", + "postrm", + "preinst", + "prerm", + "templates", + ] + c = host.run("dpkg-deb --info {}".format(deb_package.path)) + for wanted_file in wanted_files: + assert re.search(r"^\s+?\d+ bytes,\s+\d+ lines[\s*]+"+wanted_file+r"\s+.*$", + c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_jinja_files_not_present(host, deb): + """ + Make sure that jinja (.j2) files were not copied over + as-is into the debian packages. + """ + + deb_package = host.file(deb.format( + securedrop_test_vars.securedrop_version)) + + c = host.run("dpkg-deb --contents {}".format(deb_package.path)) + # There shouldn't be any files with a .j2 ending + assert not re.search(r"^.*\.j2$", c.stdout, re.M) + + [email protected]("deb", deb_packages) +def test_ossec_binaries_are_present_agent(host, deb): + """ + Inspect the package contents to ensure all ossec agent binaries are properly + included in the package. + """ + deb_package = host.file(deb.format( + securedrop_test_vars.ossec_version)) + # Only relevant for the ossec-agent package and not securedrop-ossec-agent: + if "ossec-agent" in deb_package.path and "securedrop" not in deb_package.path: + wanted_files = [ + "/var/ossec/bin/agent-auth", + "/var/ossec/bin/ossec-syscheckd", + "/var/ossec/bin/ossec-agentd", + "/var/ossec/bin/manage_agents", + "/var/ossec/bin/ossec-lua", + "/var/ossec/bin/ossec-control", + "/var/ossec/bin/ossec-luac", + "/var/ossec/bin/ossec-logcollector", + "/var/ossec/bin/util.sh", + "/var/ossec/bin/ossec-execd", + ] + c = host.run("dpkg-deb -c {}".format(deb_package.path)) + for wanted_file in wanted_files: + assert wanted_file in c.stdout + + [email protected]("deb", deb_packages) +def test_ossec_binaries_are_present_server(host, deb): + """ + Inspect the package contents to ensure all ossec server binaries are properly + included in the package. 
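# --- editor's sketch (annotation; not part of the patch) ---
# Sketch of the `dpkg-deb --info` parsing used in
# test_control_helper_files_are_present above: each control-archive member is
# listed as e.g. "  5503 bytes,  159 lines  * postinst  #!/bin/bash", so member
# names can be pulled out with roughly the regex the test applies.
# `control_members` is a hypothetical helper.
import re
import subprocess

def control_members(deb_path):
    info = subprocess.check_output(["dpkg-deb", "--info", deb_path], text=True)
    return re.findall(r"^\s+\d+ bytes,\s+\d+ lines[\s*]+(\S+)", info, re.M)
# --- end sketch ---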
+    """
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.ossec_version))
+    # Only relevant for the ossec-server package and not securedrop-ossec-server:
+    if "ossec-server" in deb_package.path and "securedrop" not in deb_package.path:
+        wanted_files = [
+            "/var/ossec/bin/ossec-maild",
+            "/var/ossec/bin/ossec-remoted",
+            "/var/ossec/bin/ossec-syscheckd",
+            "/var/ossec/bin/ossec-makelists",
+            "/var/ossec/bin/ossec-logtest",
+            "/var/ossec/bin/syscheck_update",
+            "/var/ossec/bin/ossec-reportd",
+            "/var/ossec/bin/ossec-agentlessd",
+            "/var/ossec/bin/manage_agents",
+            "/var/ossec/bin/ossec-lua",
+            "/var/ossec/bin/rootcheck_control",
+            "/var/ossec/bin/ossec-control",
+            "/var/ossec/bin/ossec-dbd",
+            "/var/ossec/bin/ossec-csyslogd",
+            "/var/ossec/bin/ossec-regex",
+            "/var/ossec/bin/ossec-luac",
+            "/var/ossec/bin/agent_control",
+            "/var/ossec/bin/ossec-monitord",
+            "/var/ossec/bin/clear_stats",
+            "/var/ossec/bin/ossec-logcollector",
+            "/var/ossec/bin/list_agents",
+            "/var/ossec/bin/verify-agent-conf",
+            "/var/ossec/bin/syscheck_control",
+            "/var/ossec/bin/util.sh",
+            "/var/ossec/bin/ossec-analysisd",
+            "/var/ossec/bin/ossec-execd",
+            "/var/ossec/bin/ossec-authd",
+        ]
+        c = host.run("dpkg-deb --contents {}".format(deb_package.path))
+        for wanted_file in wanted_files:
+            assert wanted_file in c.stdout
+
+
[email protected]("deb", deb_packages)
+def test_config_package_contains_expected_files(host, deb):
+    """
+    Inspect the package contents to ensure all config files are included in
+    the package.
+    """
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+    if "securedrop-config" in deb_package.path:
+        wanted_files = [
+            "/etc/cron-apt/action.d/9-remove",
+            "/etc/profile.d/securedrop_additions.sh",
+        ]
+        c = host.run("dpkg-deb --contents {}".format(deb_package.path))
+        for wanted_file in wanted_files:
+            assert wanted_file in c.stdout
+
+
[email protected]("deb", deb_packages)
+def test_app_package_does_not_contain_custom_logo(host, deb):
+    """
+    Inspect the package contents to ensure custom_logo.png is not present. This
+    is because custom_logo.png supersedes logo.png.
+    """
+    deb_package = host.file(deb.format(
+        securedrop_test_vars.securedrop_version))
+    if "securedrop-app-code" in deb_package.path:
+        c = host.run("dpkg-deb --contents {}".format(deb_package.path))
+        assert "/var/www/static/i/custom_logo.png" not in c.stdout
diff --git a/molecule/builder-xenial/tests/test_security_updates.py b/molecule/builder-xenial/tests/test_security_updates.py
new file mode 100644
--- /dev/null
+++ b/molecule/builder-xenial/tests/test_security_updates.py
@@ -0,0 +1,25 @@
+import os
+SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM")
+testinfra_hosts = [
+    "docker://{}-sd-sec-update".format(SECUREDROP_TARGET_PLATFORM)
+]
+
+
+def test_ensure_no_updates_avail(host):
+    """
+    Test to make sure that there are no security updates available in the
+    base builder container.
+    """
+
+    # Filter out all the security repos to their own file;
+    # without this change, all package updates would appear as if they were
+    # coming from the normal Ubuntu update channel (since they get posted to both).
+    host.run('egrep "^deb.*security" /etc/apt/sources.list > /tmp/sec.list')
+
+    dist_upgrade_simulate = host.run('apt-get -s dist-upgrade '
+                                     '-oDir::Etc::Sourcelist=/tmp/sec.list '
+                                     '|grep "^Inst" |grep -i security')
+
+    # If the grep succeeded, security package updates were found;
+    # otherwise we get a non-zero exit code, meaning no updates are needed.
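# --- editor's sketch (annotation; not part of the patch) ---
# The same simulation asserted on just below, sketched outside testinfra:
# `apt-get -s` prints one "Inst <pkg> ..." line per package it would install,
# so restricting the source list to the security repos means any such line is
# a pending security update. Helper name and source-list path are illustrative.
import subprocess

def security_updates_pending(sources_list="/tmp/sec.list"):
    sim = subprocess.run(
        ["apt-get", "-s", "dist-upgrade",
         "-oDir::Etc::Sourcelist=" + sources_list],
        capture_output=True, text=True)
    return any(line.startswith("Inst") for line in sim.stdout.splitlines())
# --- end sketch ---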
+ assert dist_upgrade_simulate.rc != 0 diff --git a/molecule/builder/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml similarity index 79% rename from molecule/builder/tests/vars.yml rename to molecule/builder-xenial/tests/vars.yml --- a/molecule/builder/tests/vars.yml +++ b/molecule/builder-xenial/tests/vars.yml @@ -1,15 +1,16 @@ --- -securedrop_version: "0.5.2" -ossec_version: "2.8.2" -keyring_version: "0.1.1" -config_version: "0.1.0" +securedrop_version: "1.2.2" +ossec_version: "3.0.0" +keyring_version: "0.1.3" +config_version: "0.1.3" +grsec_version: "4.14.154" # These values will be interpolated with values populated above # via helper functions in the tests. build_directories: # The build scripts for securedrop-app-code run separate from the others, # i.e. lacking the `/tmp/build` pardir. - - /tmp/securedrop-app-code-{securedrop_version}-amd64/ + - /tmp/securedrop-app-code-{securedrop_version}_amd64/ - /tmp/build/securedrop-keyring-{keyring_version}+{securedrop_version}-amd64/ - /tmp/build/securedrop-config-{config_version}+{securedrop_version}-amd64/ - /tmp/build/securedrop-ossec-agent-{ossec_version}+{securedrop_version}-amd64/ @@ -19,13 +20,14 @@ build_directories: - /tmp/build build_deb_packages: - - /tmp/securedrop-app-code-{securedrop_version}-amd64.deb + - /tmp/build/securedrop-app-code_{securedrop_version}+{securedrop_target_platform}_amd64.deb - /tmp/build/securedrop-ossec-agent-{ossec_version}+{securedrop_version}-amd64.deb - /tmp/build/securedrop-ossec-server-{ossec_version}+{securedrop_version}-amd64.deb - /tmp/build/ossec-server-{ossec_version}-amd64.deb - /tmp/build/ossec-agent-{ossec_version}-amd64.deb - /tmp/build/securedrop-keyring-{keyring_version}+{securedrop_version}-amd64.deb - /tmp/build/securedrop-config-{config_version}+{securedrop_version}-amd64.deb + - /tmp/build/securedrop-grsec-{grsec_version}-amd64.deb lintian_tags: # - non-standard-file-perm diff --git a/molecule/builder/tests/conftest.py b/molecule/builder/tests/conftest.py deleted file mode 100644 --- a/molecule/builder/tests/conftest.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Import variables from vars.yml and inject into pytest namespace -""" - -import os -import yaml - - -def pytest_namespace(): - """ Return dict of vars imported as 'securedrop_test_vars' into pytest - global namespace - """ - filepath = os.path.join(os.path.dirname(__file__), "vars.yml") - with open(filepath, 'r') as f: - return dict(securedrop_test_vars=yaml.safe_load(f)) diff --git a/molecule/builder/tests/test_build_dependencies.py b/molecule/builder/tests/test_build_dependencies.py deleted file mode 100644 --- a/molecule/builder/tests/test_build_dependencies.py +++ /dev/null @@ -1,96 +0,0 @@ -import pytest - - -securedrop_test_vars = pytest.securedrop_test_vars - - -def get_build_directories(): - """ - Helper function to retrieve module-namespace test vars and format - the strings to interpolate version info. Keeps the test vars DRY - in terms of version info, and required since we can't rely on - Jinja-based evaluation of the YAML files (so we can't trivially - reuse vars in other var values, as is the case with Ansible). 
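# --- editor's sketch (annotation; not part of the patch) ---
# How the vars.yml placeholders above get filled in: the test helpers apply
# str.format() with a substitutions dict, since plain YAML (unlike Ansible's
# Jinja evaluation) cannot reference one var from another. Versions below are
# the ones pinned in vars.yml; the platform value is illustrative.
substitutions = dict(securedrop_version="1.2.2",
                     securedrop_target_platform="xenial")
template = ("/tmp/build/securedrop-app-code_{securedrop_version}"
            "+{securedrop_target_platform}_amd64.deb")
assert template.format(**substitutions) == \
    "/tmp/build/securedrop-app-code_1.2.2+xenial_amd64.deb"
# --- end sketch ---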
- """ - substitutions = dict( - securedrop_version=securedrop_test_vars.securedrop_version, - ossec_version=securedrop_test_vars.ossec_version, - keyring_version=securedrop_test_vars.keyring_version, - config_version=securedrop_test_vars.config_version, - ) - build_directories = [d.format(**substitutions) for d - in securedrop_test_vars.build_directories] - return build_directories - - -build_directories = get_build_directories() - - [email protected]("package", [ - "devscripts", - "git", - "libssl-dev", - "python-dev", - "python-pip", - "secure-delete", -]) -def test_build_dependencies(Package, package): - """ - Ensure development apt dependencies are installed. - The devscripts and git packages are required for running the - `update_version.sh` script, which should be executed inside the - build VM, so let's make sure they're present. - """ - assert Package(package).is_installed - - -def test_pip_wheel_installed(Command): - """ - Ensure `wheel` is installed via pip, for packaging Python - dependencies into a Debian package. - """ - c = Command("pip freeze") - assert "wheel==0.24.0" in c.stdout - assert c.rc == 0 - - -def test_sass_gem_installed(Command): - """ - Ensure the `sass` Ruby gem is installed, for compiling SASS to CSS. - """ - c = Command("gem list") - assert "sass (3.4.23)" in c.stdout - assert c.rc == 0 - - -def test_pip_dependencies_installed(Command): - """ - Ensure the development pip dependencies are installed - """ - c = Command("pip list installed") - assert "Flask-Babel" in c.stdout - assert c.rc == 0 - - [email protected]("directory", get_build_directories()) -def test_build_directories(File, directory): - """ - Ensure the build directories are present. These directories are - the top-level of the Debian packages being created. They contain - nested subdirs of varying complexity, depending on package. - """ - if '{}' in directory: - directory = directory.format(securedrop_test_vars.securedrop_version) - assert File(directory).is_directory - - -def test_build_all_packages_updated(Command): - """ - Ensure a dist-upgrade has already been run, by checking that no - packages are eligible for upgrade currently. This will ensure that - all upgrades, security and otherwise, have been applied to the VM - used to build packages. - """ - c = Command('aptitude --simulate -y dist-upgrade') - assert c.rc == 0 - assert "No packages will be installed, upgraded, or removed." in c.stdout diff --git a/molecule/builder/tests/test_ossec_packages.py b/molecule/builder/tests/test_ossec_packages.py deleted file mode 100644 --- a/molecule/builder/tests/test_ossec_packages.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - - [email protected]('apt_package', [ - 'inotify-tools', - 'libssl-dev', - 'make', - 'tar', - 'unzip', -]) -def test_build_ossec_apt_dependencies(Package, apt_package): - """ - Ensure that the apt dependencies required for building the OSSEC - source deb packages (not the metapackages) are installed. - """ - assert Package(apt_package).is_installed diff --git a/molecule/builder/tests/test_securedrop_deb_package.py b/molecule/builder/tests/test_securedrop_deb_package.py deleted file mode 100644 --- a/molecule/builder/tests/test_securedrop_deb_package.py +++ /dev/null @@ -1,288 +0,0 @@ -import pytest -import os -import re - - -securedrop_test_vars = pytest.securedrop_test_vars - - -def extract_package_name_from_filepath(filepath): - """ - Helper function to infer intended package name from - the absolute filepath, using a rather garish regex. 
- E.g., given: - securedrop-ossec-agent-2.8.2+0.3.10-amd64.deb - - retuns: - - securedrop-ossec-agent - - which can then be used for comparisons in dpkg output. - """ - deb_basename = os.path.basename(filepath) - package_name = re.search('^([a-z\-]+(?!\d))', deb_basename).groups()[0] - assert deb_basename.startswith(package_name) - return package_name - - -def get_deb_packages(): - """ - Helper function to retrieve module-namespace test vars and format - the strings to interpolate version info. Keeps the test vars DRY - in terms of version info, and required since we can't rely on - Jinja-based evaluation of the YAML files (so we can't trivially - reuse vars in other var values, as is the case with Ansible). - """ - substitutions = dict( - securedrop_version=securedrop_test_vars.securedrop_version, - ossec_version=securedrop_test_vars.ossec_version, - keyring_version=securedrop_test_vars.keyring_version, - config_version=securedrop_test_vars.config_version, - ) - - deb_packages = [d.format(**substitutions) for d - in securedrop_test_vars.build_deb_packages] - return deb_packages - - -deb_packages = get_deb_packages() - - -def get_deb_tags(): - """ - Helper function to build array of package and tag tuples - for lintian. - """ - deb_tags = [] - - for deb in get_deb_packages(): - for tag in securedrop_test_vars.lintian_tags: - deb_tags.append((deb, tag)) - - return deb_tags - - -deb_tags = get_deb_tags() - - [email protected]("deb", deb_packages) -def test_build_deb_packages(File, deb): - """ - Sanity check the built Debian packages for Control field - values and general package structure. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - assert deb_package.is_file - - [email protected]("deb", deb_packages) -def test_deb_packages_appear_installable(File, Command, Sudo, deb): - """ - Confirms that a dry-run of installation reports no errors. - Simple check for valid Debian package structure, but not thorough. - When run on a malformed package, `dpkg` will report: - - dpkg-deb: error: `foo.deb' is not a debian format archive - - Testing application behavior is left to the functional tests. - """ - - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - - deb_basename = os.path.basename(deb_package.path) - package_name = extract_package_name_from_filepath(deb_package.path) - assert deb_basename.startswith(package_name) - - # Sudo is required to call `dpkg --install`, even as dry-run. - with Sudo(): - c = Command("dpkg --install --dry-run {}".format(deb_package.path)) - assert "Selecting previously unselected package {}".format( - package_name) in c.stdout - regex = "Preparing to unpack [./]+{} ...".format( - re.escape(deb_basename)) - assert re.search(regex, c.stdout, re.M) - assert c.rc == 0 - - [email protected]("deb", deb_packages) -def test_deb_package_control_fields(File, Command, deb): - """ - Ensure Debian Control fields are populated as expected in the package. - These checks are rather superficial, and don't actually confirm that the - .deb files are not broken. At a later date, consider integration tests - that actually use these built files during an Ansible provisioning run. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - package_name = extract_package_name_from_filepath(deb_package.path) - # The `--field` option will display all fields if none are specified. 
- c = Command("dpkg-deb --field {}".format(deb_package.path)) - - assert "Maintainer: SecureDrop Team <[email protected]>" in c.stdout - # The securedrop-config package is architecture indepedent - if package_name == "securedrop-config": - assert "Architecture: all" in c.stdout - else: - assert "Architecture: amd64" in c.stdout - - assert "Package: {}".format(package_name) in c.stdout - assert c.rc == 0 - - [email protected]("deb", deb_packages) -def test_deb_package_control_fields_homepage(File, Command, deb): - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - # The `--field` option will display all fields if none are specified. - c = Command("dpkg-deb --field {}".format(deb_package.path)) - # The OSSEC source packages will have a different homepage; - # all other packages should set securedrop.org as homepage. - if os.path.basename(deb_package.path).startswith('ossec-'): - assert "Homepage: http://ossec.net" in c.stdout - else: - assert "Homepage: https://securedrop.org" in c.stdout - - [email protected]("deb", deb_packages) -def test_deb_package_contains_no_config_file(File, Command, deb): - """ - Ensures the `securedrop-app-code` package does not ship a `config.py` - file. Doing so would clobber the site-specific changes made via Ansible. - - Somewhat lazily checking all deb packages, rather than just the app-code - package, but it accomplishes the same in a DRY manner. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) - assert not re.search("^.*config\.py$", c.stdout, re.M) - - [email protected]("deb", deb_packages) -def test_deb_package_contains_pot_file(File, Command, deb): - """ - Ensures the `securedrop-app-code` package has the - messages.pot file - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) - # Only relevant for the securedrop-app-code package: - if "securedrop-app-code" in deb_package.path: - assert re.search("^.*messages.pot$", c.stdout, re.M) - - [email protected]("deb", deb_packages) -def test_deb_package_contains_mo_file(File, Command, deb): - """ - Ensures the `securedrop-app-code` package has at least one - compiled mo file. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - c = Command("dpkg-deb --contents {}".format(deb_package.path)) - # Only relevant for the securedrop-app-code package: - if "securedrop-app-code" in deb_package.path: - assert re.search("^.*messages\.mo$", c.stdout, re.M) - - [email protected]("deb", deb_packages) -def test_deb_package_contains_no_generated_assets(File, Command, deb): - """ - Ensures the `securedrop-app-code` package does not ship a minified - static assets, which are built automatically via Flask-Assets, and may be - present in the source directory used to build from. 
- """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - - # Only relevant for the securedrop-app-code package: - if "securedrop-app-code" in deb_package.path: - c = Command("dpkg-deb --contents {}".format(deb_package.path)) - # static/gen/ directory should exist - assert re.search("^.*\./var/www/securedrop" - "/static/gen/$", c.stdout, re.M) - # static/gen/ directory should be empty - assert not re.search("^.*\./var/www/securedrop" - "/static/gen/.+$", c.stdout, re.M) - - # static/.webassets-cache/ directory should exist - assert re.search("^.*\./var/www/securedrop" - "/static/.webassets-cache/$", c.stdout, re.M) - # static/.webassets-cache/ directory should be empty - assert not re.search("^.*\./var/www/securedrop" - "/static/.webassets-cache/.+$", c.stdout, re.M) - - # no SASS files should exist; only the generated CSS files. - assert not re.search("^.*sass.*$", c.stdout, re.M) - - #no .map files should exist; only the generated CSS files. - assert not re.search("^.*css.map$", c.stdout, re.M) - [email protected]("deb", deb_packages) -def test_deb_package_contains_css(File, Command, deb): - """ - Ensures the `securedrop-app-code` package contains files that - are generated during the `sass` build process. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - - # Only relevant for the securedrop-app-code package: - if "securedrop-app-code" in deb_package.path: - c = Command("dpkg-deb --contents {}".format(deb_package.path)) - - for css_type in ['journalist', 'source']: - assert re.search("^.*\./var/www/securedrop/static/" - "css/{}.css$".format(css_type), c.stdout, re.M) - - [email protected]("deb, tag", deb_tags) -def test_deb_package_lintian(File, Command, deb, tag): - """ - Ensures lintian likes our Debian packages. - """ - deb_package = File(deb.format( - securedrop_test_vars.securedrop_version)) - c = Command("""lintian --tags {} --no-tag-display-limit {}""".format( - tag, deb_package.path)) - assert len(c.stdout) == 0 - [email protected]("deb", deb_packages) -def test_deb_app_package_contains_https_validate_dir(host, deb): - """ - Ensures the `securedrop-app-code` package ships with a validation - '.well-known' directory - """ - deb_package = host.file(deb.format( - securedrop_test_vars.securedrop_version)) - - # Only relevant for the securedrop-app-code package: - if "securedrop-app-code" in deb_package.path: - c = host.run("dpkg-deb --contents {}".format(deb_package.path)) - # static/gen/ directory should exist - assert re.search("^.*\./var/www/securedrop/" - ".well-known/$", c.stdout, re.M) - [email protected]("deb", deb_packages) -def test_grsec_metapackage(host, deb): - """ - Sanity checks on the securedrop-grsec metapackage. Mostly checks - for presence of PaX flags hook and sysctl settings. - Does not validate file contents, just presence. - """ - - deb_package = host.file(deb.format( - securedrop_test_vars.securedrop_version)) - - if "securedrop-grsec" in deb_package.path: - c = host.run("dpkg-deb --contents {}".format(deb_package.path)) - # Custom sysctl options should be present - assert re.search("^.*\./etc/sysctl.d/30-securedrop.conf$", - c.stdout, re.M) - c = host.run("dpkg-deb --contents {}".format(deb_package.path)) - # Post-install kernel hook for managing PaX flags must exist. 
- assert re.search("^.*\./etc/kernel/postinst.d/paxctl-grub$", - c.stdout, re.M) diff --git a/molecule/fetch-tor-packages/tests/test_tor_packages.py b/molecule/fetch-tor-packages/tests/test_tor_packages.py new file mode 100644 --- /dev/null +++ b/molecule/fetch-tor-packages/tests/test_tor_packages.py @@ -0,0 +1,50 @@ +import os +import pytest + + +testinfra_hosts = ['docker://tor-package-fetcher-xenial'] +TOR_DOWNLOAD_DIR = "/tmp/tor-debs" +TOR_PACKAGES = [ + {"name": "tor", "arch": "amd64"}, + {"name": "tor-geoipdb", "arch": "all"}, +] +TOR_VERSION = "0.4.1.6-1~xenial+1" + + +def test_tor_apt_repo(host): + """ + Ensure the upstream Tor Project repo is correct, since that's + where we've fetched the deb packages from. + """ + repo_file = "/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list" # noqa + f = host.file(repo_file) + assert f.exists + assert f.contains("https://deb.torproject.org") + + [email protected]("pkg", TOR_PACKAGES) +def test_tor_package_versions(host, pkg): + """ + Inspect package info and confirm we're getting the version we expect. + """ + package_name = "{}_{}_{}.deb".format(pkg["name"], TOR_VERSION, pkg["arch"]) + filepath = os.path.join(TOR_DOWNLOAD_DIR, package_name) + f = host.file(filepath) + assert f.exists + assert f.is_file + + cmd = "dpkg-deb -f {} Version".format(filepath) + package_version = host.check_output(cmd) + assert package_version == TOR_VERSION + + +def test_tor_package_platform(host): + """ + Sanity check to ensure we're running on Xenial, which is the only + option for SecureDrop distributions supported by upstream Tor Project. + The Trusty channel was disabled by Tor Project on 2019-01-08. + """ + assert host.system_info.type == "linux" + assert host.system_info.distribution == "ubuntu" + assert host.system_info.codename == "xenial" + assert host.system_info.release == "16.04" diff --git a/testinfra/app-code/test_haveged.py b/molecule/testinfra/staging/app-code/test_haveged.py similarity index 66% rename from testinfra/app-code/test_haveged.py rename to molecule/testinfra/staging/app-code/test_haveged.py --- a/testinfra/app-code/test_haveged.py +++ b/molecule/testinfra/staging/app-code/test_haveged.py @@ -1,34 +1,37 @@ -def test_haveged_config(File): +testinfra_hosts = ["app-staging"] + + +def test_haveged_config(host): """ Ensure haveged's low entrop watermark is sufficiently high. """ - f = File('/etc/default/haveged') + f = host.file('/etc/default/haveged') assert f.is_file assert f.user == 'root' assert f.group == 'root' - assert oct(f.mode) == '0644' + assert f.mode == 0o644 assert f.contains('^DAEMON_ARGS="-w 2400"$') -def test_haveged_no_duplicate_lines(Command): +def test_haveged_no_duplicate_lines(host): """ Regression test to check for duplicate entries. Earlier playbooks for configuring the SD instances needlessly appended the `DAEMON_ARGS` line everytime the playbook was run. Fortunately the duplicate lines don't break the service, but it's still poor form. """ - c = Command("uniq --repeated /etc/default/haveged") + c = host.run("uniq --repeated /etc/default/haveged") assert c.rc == 0 assert c.stdout == "" -def test_haveged_is_running(Service, Sudo): +def test_haveged_is_running(host): """ Ensure haveged service is running, to provide additional entropy. """ - # Sudo is necessary to read /proc when running under grsecurity, + # sudo is necessary to read /proc when running under grsecurity, # which the App hosts do. Not technically necessary under development. 
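# --- editor's sketch (annotation; not part of the patch) ---
# The rename above is part of a broader migration to testinfra's `host`
# fixture: module-style fixtures (File, Command, Sudo, Service) become
# attributes of `host`, and privilege escalation becomes a context manager.
# A minimal sketch of the new idiom (the test name is hypothetical):
def test_service_running_example(host):
    with host.sudo():                  # needed to read /proc under grsecurity
        s = host.service("haveged")
        assert s.is_running
        assert s.is_enabled
# --- end sketch ---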
- with Sudo(): - s = Service("haveged") + with host.sudo(): + s = host.service("haveged") assert s.is_running assert s.is_enabled diff --git a/testinfra/app-code/test_securedrop_app_code.py b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py similarity index 51% rename from testinfra/app-code/test_securedrop_app_code.py rename to molecule/testinfra/staging/app-code/test_securedrop_app_code.py --- a/testinfra/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/staging/app-code/test_securedrop_app_code.py @@ -1,99 +1,98 @@ -import os import pytest +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] -def test_apache_default_docroot_is_absent(File): +def test_apache_default_docroot_is_absent(host): """ Ensure that the default docroot for Apache, containing static HTML under Debian, has been removed. Leaving it in place can be a privacy leak, as it displays version information by default. """ - assert not File('/var/www/html').exists + assert not host.file('/var/www/html').exists @pytest.mark.parametrize('package', [ - 'apparmor-utils', - 'gnupg2', - 'haveged', - 'python', - 'python-pip', - 'redis-server', - 'secure-delete', - 'sqlite', - 'supervisor', + 'apache2', + 'apparmor-utils', + 'coreutils', + 'gnupg2', + 'haveged', + 'libapache2-mod-xsendfile', + 'libpython3.5', + 'paxctld', + 'python3', + 'redis-server', + 'securedrop-config', + 'securedrop-keyring', + 'sqlite3', ]) -def test_securedrop_application_apt_dependencies(Package, package): +def test_securedrop_application_apt_dependencies(host, package): """ Ensure apt dependencies required to install `securedrop-app-code` are present. These should be pulled in automatically via apt, due to specification in Depends in package control file. """ - assert Package(package).is_installed + assert host.package(package).is_installed -def test_securedrop_application_test_locale(File, Sudo): +def test_securedrop_application_test_locale(host): """ - Ensure SecureDrop DEFAULT_LOCALE is present. + Ensure both SecureDrop DEFAULT_LOCALE and SUPPORTED_LOCALES are present. """ - securedrop_config = File("{}/config.py".format( + securedrop_config = host.file("{}/config.py".format( securedrop_test_vars.securedrop_code)) - with Sudo(): + with host.sudo(): assert securedrop_config.is_file assert securedrop_config.contains("^DEFAULT_LOCALE") - assert securedrop_config.content.count("DEFAULT_LOCALE") == 1 + assert securedrop_config.content_string.count("DEFAULT_LOCALE") == 1 + assert securedrop_config.content_string.count("SUPPORTED_LOCALES") == 1 + assert "\nSUPPORTED_LOCALES = ['el', 'ar', 'en_US']\n" in securedrop_config.content_string -def test_securedrop_application_test_journalist_key(File, Sudo): +def test_securedrop_application_test_journalist_key(host): """ Ensure the SecureDrop Application GPG public key file is present. This is a test-only pubkey provided in the repository strictly for testing. """ - pubkey_file = File("{}/test_journalist_key.pub".format( + pubkey_file = host.file("{}/test_journalist_key.pub".format( securedrop_test_vars.securedrop_data)) - # Sudo is only necessary when testing against app hosts, since the + # sudo is only necessary when testing against app hosts, since the # permissions are tighter. Let's elevate privileges so we're sure # we can read the correct file attributes and test them. 
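# --- editor's sketch (annotation; not part of the patch) ---
# Another recurring change in this patch: testinfra exposes file modes as
# integers, so the old string comparison `oct(f.mode) == "0644"` becomes a
# comparison against an octal literal. The path below is taken from the tests;
# the function name is hypothetical.
def test_mode_example(host):
    f = host.file("/etc/default/haveged")
    assert f.mode == 0o644             # was: assert oct(f.mode) == "0644"
# --- end sketch ---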
- with Sudo(): + with host.sudo(): assert pubkey_file.is_file assert pubkey_file.user == "root" assert pubkey_file.group == "root" - assert oct(pubkey_file.mode) == "0644" + assert pubkey_file.mode == 0o644 # Let's make sure the corresponding fingerprint is specified # in the SecureDrop app configuration. - securedrop_config = File("{}/config.py".format( + securedrop_config = host.file("{}/config.py".format( securedrop_test_vars.securedrop_code)) - with Sudo(): + with host.sudo(): assert securedrop_config.is_file - # travis needs the config.py file ran owned by root not sure why - # just saw this note in the travis.yml config - if hostenv == "travis": - assert securedrop_config.user == "root" - assert securedrop_config.group == "root" - else: - assert securedrop_config.user == \ - securedrop_test_vars.securedrop_user - assert securedrop_config.group == \ - securedrop_test_vars.securedrop_user - assert oct(securedrop_config.mode) == "0600" + assert securedrop_config.user == \ + securedrop_test_vars.securedrop_user + assert securedrop_config.group == \ + securedrop_test_vars.securedrop_user + assert securedrop_config.mode == 0o600 assert securedrop_config.contains( "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$") -def test_securedrop_application_sqlite_db(File, Sudo): +def test_securedrop_application_sqlite_db(host): """ Ensure sqlite database exists for application. The database file should be created by Ansible on first run. """ - # Sudo is necessary under the App hosts, which have restrictive file + # sudo is necessary under the App hosts, which have restrictive file # permissions on the doc root. Not technically necessary under dev host. - with Sudo(): - f = File("{}/db.sqlite".format(securedrop_test_vars.securedrop_data)) + with host.sudo(): + f = host.file("{}/db.sqlite".format(securedrop_test_vars.securedrop_data)) assert f.is_file assert f.user == securedrop_test_vars.securedrop_user assert f.group == securedrop_test_vars.securedrop_user - assert oct(f.mode) == "0644" + assert f.mode == 0o640 diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py b/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py @@ -0,0 +1,50 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_rqrequeue_service(host): + """ + Verify configuration of securedrop_rqrequeue systemd service. 
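# --- editor's sketch (annotation; not part of the patch) ---
# The systemd service tests below compare the entire unit file against an
# expected string assembled with "\n".join(...); note the trailing "\n" on the
# final element, which reproduces the file's terminating newline. A sketch
# with illustrative values:
expected = "\n".join([
    "[Unit]",
    "Description=example unit",
    "",
    "[Install]",
    "WantedBy=multi-user.target\n",
])
# host.file(service_file).content_string is then asserted equal to `expected`.
# --- end sketch ---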
+ """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_rqrequeue.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop rqrequeue process", + "After=redis-server.service", + "Wants=redis-server.service", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/rqrequeue --interval 60".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_rqrequeue") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py b/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py @@ -0,0 +1,49 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_rqworker_service(host): + """ + Verify configuration of securedrop_rqworker systemd service. + """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_rqworker.service" + + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop rq worker", + "After=redis-server.service", + "Wants=redis-server.service", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/rqworker".format(securedrop_test_vars.securedrop_venv_bin), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_rqworker") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py b/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py @@ -0,0 +1,48 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_shredder_service(host): + """ + Verify configuration of securedrop_shredder systemd service. 
+ """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_shredder.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop shredder", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/shredder --interval 60".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_shredder") + assert s.is_enabled + assert s.is_running diff --git a/testinfra/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py similarity index 75% rename from testinfra/app/apache/test_apache_journalist_interface.py rename to molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/testinfra/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -2,6 +2,7 @@ import re +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars # Setting once so it can be reused in multiple tests. @@ -12,27 +13,35 @@ 'Header set X-XSS-Protection: "1; mode=block"', 'Header set X-Content-Type-Options: nosniff', 'Header set X-Download-Options: noopen', - "Header set X-Content-Security-Policy: \"default-src 'self'\"", - "Header set Content-Security-Policy: \"default-src 'self'\"", + 'Header set X-Content-Security-Policy: "' + 'default-src \'none\'; ' + 'script-src \'self\'; ' + 'style-src \'self\'; ' + 'img-src \'self\'; ' + 'font-src \'self\';"', + 'Header set Content-Security-Policy: "' + 'default-src \'none\'; ' + 'script-src \'self\'; ' + 'style-src \'self\'; ' + 'img-src \'self\'; ' + 'font-src \'self\';"', 'Header set Referrer-Policy "no-referrer"', - 'Header unset Etag', ] - # Test is not DRY; haven't figured out how to parametrize on # multiple inputs, so explicitly redeclaring test logic. @pytest.mark.parametrize("header", wanted_apache_headers) -def test_apache_headers_journalist_interface(File, header): +def test_apache_headers_journalist_interface(host, header): """ Test for expected headers in Document Interface vhost config. 
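# --- editor's sketch (annotation; not part of the patch) ---
# The long Content-Security-Policy literals above are adjacent string
# fragments that Python concatenates into one header value. An equivalent,
# more explicit composition (directive set copied from the test):
directives = [
    ("default-src", "'none'"),
    ("script-src", "'self'"),
    ("style-src", "'self'"),
    ("img-src", "'self'"),
    ("font-src", "'self'"),
]
csp = " ".join("{} {};".format(name, value) for name, value in directives)
# -> "default-src 'none'; script-src 'self'; ... font-src 'self';"
# --- end sketch ---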
""" - f = File("/etc/apache2/sites-available/journalist.conf") + f = host.file("/etc/apache2/sites-available/journalist.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 header_regex = "^{}$".format(re.escape(header)) - assert re.search(header_regex, f.content, re.M) + assert re.search(header_regex, f.content_string, re.M) # Block of directory declarations for Apache vhost is common @@ -50,11 +59,11 @@ def test_apache_headers_journalist_interface(File, header): <Directory /var/www/> Options None AllowOverride None - <Limit GET POST HEAD> + <Limit GET POST HEAD DELETE> Order allow,deny allow from {apache_allow_from} </Limit> - <LimitExcept GET POST HEAD> + <LimitExcept GET POST HEAD DELETE> Order deny,allow Deny from all </LimitExcept> @@ -63,11 +72,11 @@ def test_apache_headers_journalist_interface(File, header): <Directory {securedrop_code}> Options None AllowOverride None - <Limit GET POST HEAD> + <Limit GET POST HEAD DELETE> Order allow,deny allow from {apache_allow_from} </Limit> - <LimitExcept GET POST HEAD> + <LimitExcept GET POST HEAD DELETE> Order deny,allow Deny from all </LimitExcept> @@ -85,6 +94,7 @@ def test_apache_headers_journalist_interface(File, header): securedrop_test_vars.securedrop_code), 'WSGIProcessGroup journalist', 'WSGIScriptAlias / /var/www/journalist.wsgi', + 'WSGIPassAuthorization On', 'Header set Cache-Control "no-store"', "Alias /static {}/static".format(securedrop_test_vars.securedrop_code), """ @@ -102,7 +112,7 @@ def test_apache_headers_journalist_interface(File, header): 'ErrorLog /var/log/apache2/journalist-error.log', 'CustomLog /var/log/apache2/journalist-access.log combined', ]) -def test_apache_config_journalist_interface(File, apache_opt): +def test_apache_config_journalist_interface(host, apache_opt): """ Ensure the necessary Apache settings for serving the application are in place. Some values will change according to the host, @@ -111,25 +121,25 @@ def test_apache_config_journalist_interface(File, apache_opt): These checks apply only to the Document Interface, used by Journalists. """ - f = File("/etc/apache2/sites-available/journalist.conf") + f = host.file("/etc/apache2/sites-available/journalist.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex = "^{}$".format(re.escape(apache_opt)) - assert re.search(regex, f.content, re.M) + assert re.search(regex, f.content_string, re.M) -def test_apache_journalist_interface_vhost(File): +def test_apache_journalist_interface_vhost(host): """ Ensure the document root is configured with correct access restrictions for serving Journalist Interface application code. """ - f = File("/etc/apache2/sites-available/journalist.conf") - assert common_apache2_directory_declarations in f.content + f = host.file("/etc/apache2/sites-available/journalist.conf") + assert common_apache2_directory_declarations in f.content_string -def test_apache_logging_journalist_interface(File, Command, Sudo): +def test_apache_logging_journalist_interface(host): """ Check that logging is configured correctly for the Journalist Interface. The actions of Journalists are logged by the system, so that an Admin can @@ -138,15 +148,15 @@ def test_apache_logging_journalist_interface(File, Command, Sudo): Logs were broken for some period of time, logging only "combined" to the logfile, rather than the combined LogFormat intended. 
""" - # Sudo is necessary because /var/log/apache2 is mode 0750. - with Sudo(): - f = File("/var/log/apache2/journalist-access.log") + # sudo is necessary because /var/log/apache2 is mode 0750. + with host.sudo(): + f = host.file("/var/log/apache2/journalist-access.log") assert f.is_file if f.size == 0: # If the file is empty, the Journalist Interface hasn't been used # yet, so make a quick GET request local to the host so we can # validate the log entry. - Command.check_output("curl http://127.0.0.1:8080") + host.check_output("curl http://127.0.0.1:8080") assert f.size > 0 # Make sure something was logged. # LogFormat declaration was missing, so track regressions that log diff --git a/testinfra/app/apache/test_apache_service.py b/molecule/testinfra/staging/app/apache/test_apache_service.py similarity index 65% rename from testinfra/app/apache/test_apache_service.py rename to molecule/testinfra/staging/app/apache/test_apache_service.py --- a/testinfra/app/apache/test_apache_service.py +++ b/molecule/testinfra/staging/app/apache/test_apache_service.py @@ -1,6 +1,7 @@ import pytest +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars @@ -8,12 +9,12 @@ "source", "journalist", ]) -def test_apache_enabled_sites(Command, Sudo, apache_site): +def test_apache_enabled_sites(host, apache_site): """ Ensure the Source and Journalist interfaces are enabled. """ - with Sudo(): - c = Command("/usr/sbin/a2query -s {}".format(apache_site)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -s {}".format(apache_site)) assert "{} (enabled".format(apache_site) in c.stdout assert c.rc == 0 @@ -21,32 +22,32 @@ def test_apache_enabled_sites(Command, Sudo, apache_site): @pytest.mark.parametrize("apache_site", [ "000-default", ]) -def test_apache_disabled_sites(Command, apache_site): +def test_apache_disabled_sites(host, apache_site): """ Ensure the default HTML document root is disabled. """ - c = Command("a2query -s {}".format(apache_site)) + c = host.run("a2query -s {}".format(apache_site)) assert "No site matches {} (disabled".format(apache_site) in c.stderr assert c.rc == 32 -def test_apache_service(Service, Sudo): +def test_apache_service(host): """ Ensure Apache service is running. """ - # Sudo is necessary to run `service apache2 status`, otherwise + # sudo is necessary to run `service apache2 status`, otherwise # the service is falsely reported as not running. - with Sudo(): - s = Service("apache2") + with host.sudo(): + s = host.service("apache2") assert s.is_running assert s.is_enabled -def test_apache_user(User): +def test_apache_user(host): """ Ensure user account for running application code is configured correctly. """ - u = User("www-data") + u = host.user("www-data") assert u.exists assert u.home == "/var/www" assert u.shell == "/usr/sbin/nologin" @@ -56,14 +57,14 @@ def test_apache_user(User): "80", "8080", ]) -def test_apache_listening(Socket, Sudo, port): +def test_apache_listening(host, port): """ Ensure Apache is listening on proper ports and interfaces. In staging, expect the service to be bound to 0.0.0.0, but in prod, it should be restricted to 127.0.0.1. """ - # Sudo is necessary to read from /proc/net/tcp. - with Sudo(): - s = Socket("tcp://{}:{}".format( + # sudo is necessary to read from /proc/net/tcp. 
+ with host.sudo(): + s = host.socket("tcp://{}:{}".format( securedrop_test_vars.apache_listening_address, port)) assert s.is_listening diff --git a/testinfra/app/apache/test_apache_source_interface.py b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py similarity index 79% rename from testinfra/app/apache/test_apache_source_interface.py rename to molecule/testinfra/staging/app/apache/test_apache_source_interface.py --- a/testinfra/app/apache/test_apache_source_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_source_interface.py @@ -1,22 +1,22 @@ import pytest import re - +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars @pytest.mark.parametrize("header", securedrop_test_vars.wanted_apache_headers) -def test_apache_headers_source_interface(File, header): +def test_apache_headers_source_interface(host, header): """ Test for expected headers in Source Interface vhost config. """ - f = File("/etc/apache2/sites-available/source.conf") + f = host.file("/etc/apache2/sites-available/source.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 header_regex = "^{}$".format(re.escape(header)) - assert re.search(header_regex, f.content, re.M) + assert re.search(header_regex, f.content_string, re.M) @pytest.mark.parametrize("apache_opt", [ @@ -27,7 +27,7 @@ def test_apache_headers_source_interface(File, header): 'WSGIProcessGroup source', 'WSGIScriptAlias / /var/www/source.wsgi', 'Header set Cache-Control "no-store"', - 'Header set Referrer-Policy "no-referrer"', + 'Header set Referrer-Policy "same-origin"', "Alias /static {}/static".format(securedrop_test_vars.securedrop_code), """ <Directory {}/static> @@ -46,7 +46,7 @@ def test_apache_headers_source_interface(File, header): 'ErrorDocument 500 /notfound', "ErrorLog {}".format(securedrop_test_vars.apache_source_log), ]) -def test_apache_config_source_interface(File, apache_opt): +def test_apache_config_source_interface(host, apache_opt): """ Ensure the necessary Apache settings for serving the application are in place. Some values will change according to the host, @@ -55,10 +55,10 @@ def test_apache_config_source_interface(File, apache_opt): These checks apply only to the Source Interface, used by Sources. """ - f = File("/etc/apache2/sites-available/source.conf") + f = host.file("/etc/apache2/sites-available/source.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex = "^{}$".format(re.escape(apache_opt)) - assert re.search(regex, f.content, re.M) + assert re.search(regex, f.content_string, re.M) diff --git a/testinfra/app/apache/test_apache_system_config.py b/molecule/testinfra/staging/app/apache/test_apache_system_config.py similarity index 79% rename from testinfra/app/apache/test_apache_system_config.py rename to molecule/testinfra/staging/app/apache/test_apache_system_config.py --- a/testinfra/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/staging/app/apache/test_apache_system_config.py @@ -1,29 +1,27 @@ import pytest import re - +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars @pytest.mark.parametrize("package", [ - "apache2-mpm-worker", - "libapache2-mod-wsgi", "libapache2-mod-xsendfile", ]) -def test_apache_apt_packages(Package, package): +def test_apache_apt_packages(host, package): """ Ensure required Apache packages are installed. 
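# --- editor's sketch (annotation; not part of the patch) ---
# The config assertions in these Apache tests share one idiom: re.escape() the
# literal directive, anchor it as a whole line, and search the file content in
# multiline mode. A self-contained sketch with sample content:
import re

config = "ServerTokens Prod\nServerSignature Off\n"
apache_opt = "ServerSignature Off"
assert re.search("^{}$".format(re.escape(apache_opt)), config, re.M)
# --- end sketch ---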
""" - assert Package(package).is_installed + assert host.package(package).is_installed -def test_apache_security_config_deprecated(File): +def test_apache_security_config_deprecated(host): """ Ensure that /etc/apache2/security is absent, since it was setting redundant options already presentin /etc/apache2/apache2.conf. See #643 for discussion. """ - assert not File("/etc/apache2/security").exists + assert not host.file("/etc/apache2/security").exists @pytest.mark.parametrize("apache_opt", [ @@ -48,26 +46,26 @@ def test_apache_security_config_deprecated(File): 'ServerSignature Off', 'TraceEnable Off', ]) -def test_apache_config_settings(File, apache_opt): +def test_apache_config_settings(host, apache_opt): """ Check required Apache config settings for general server. These checks do not target individual interfaces, e.g. Source versus Document Interface, and instead apply to Apache more generally. """ - f = File("/etc/apache2/apache2.conf") + f = host.file("/etc/apache2/apache2.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" - assert re.search("^{}$".format(re.escape(apache_opt)), f.content, re.M) + assert f.mode == 0o644 + assert re.search("^{}$".format(re.escape(apache_opt)), f.content_string, re.M) @pytest.mark.parametrize("port", [ "80", "8080", ]) -def test_apache_ports_config(File, SystemInfo, port): +def test_apache_ports_config(host, port): """ Ensure Apache ports config items, which specify how the Source and Document Interfaces are configured to be served @@ -75,11 +73,11 @@ def test_apache_ports_config(File, SystemInfo, port): to permit port forwarding for local testing, but in production, they're restricted to localhost, for use over Tor. """ - f = File("/etc/apache2/ports.conf") + f = host.file("/etc/apache2/ports.conf") assert f.is_file assert f.user == "root" assert f.group == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 listening_regex = "^Listen {}:{}$".format(re.escape( securedrop_test_vars.apache_listening_address), port) @@ -105,14 +103,14 @@ def test_apache_ports_config(File, SystemInfo, port): 'wsgi', 'xsendfile', ]) -def test_apache_modules_present(Command, Sudo, apache_module): +def test_apache_modules_present(host, apache_module): """ Ensure presence of required Apache modules. Application will not work correctly if these are missing. A separate test will check for disabled modules. """ - with Sudo(): - c = Command("/usr/sbin/a2query -m {}".format(apache_module)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -m {}".format(apache_module)) assert "{} (enabled".format(apache_module) in c.stdout assert c.rc == 0 @@ -124,14 +122,14 @@ def test_apache_modules_present(Command, Sudo, apache_module): 'env', 'status', ]) -def test_apache_modules_absent(Command, Sudo, apache_module): +def test_apache_modules_absent(host, apache_module): """ Ensure absence of unwanted Apache modules. Application does not require these modules, so they should be disabled to reduce attack surface. A separate test will check for disabled modules. 
""" - with Sudo(): - c = Command("/usr/sbin/a2query -m {}".format(apache_module)) + with host.sudo(): + c = host.run("/usr/sbin/a2query -m {}".format(apache_module)) assert "No module matches {} (disabled".format(apache_module) in \ c.stderr assert c.rc == 32 @@ -139,7 +137,7 @@ def test_apache_modules_absent(Command, Sudo, apache_module): @pytest.mark.parametrize("logfile", securedrop_test_vars.allowed_apache_logfiles) -def test_apache_logfiles_present(File, Command, Sudo, logfile): +def test_apache_logfiles_present(host, logfile): """" Ensure that whitelisted Apache log files for the Source and Journalist Interfaces are present. In staging, we permit a "source-error" log, @@ -148,13 +146,13 @@ def test_apache_logfiles_present(File, Command, Sudo, logfile): Apache log directory. """ # We need elevated privileges to read files inside /var/log/apache2 - with Sudo(): - f = File(logfile) + with host.sudo(): + f = host.file(logfile) assert f.is_file assert f.user == "root" -def test_apache_logfiles_no_extras(Command, Sudo): +def test_apache_logfiles_no_extras(host): """ Ensure that no unwanted Apache logfiles are present. Complements the `test_apache_logfiles_present` config test. Here, we confirm that the @@ -162,7 +160,7 @@ def test_apache_logfiles_no_extras(Command, Sudo): on the Application Server, whether staging or prod. """ # We need elevated privileges to read files inside /var/log/apache2 - with Sudo(): - c = Command("find /var/log/apache2 -mindepth 1 | wc -l") + with host.sudo(): + c = host.run("find /var/log/apache2 -mindepth 1 | wc -l") assert int(c.stdout) == \ len(securedrop_test_vars.allowed_apache_logfiles) diff --git a/testinfra/app/test_network.py b/molecule/testinfra/staging/app/test_app_network.py similarity index 65% rename from testinfra/app/test_network.py rename to molecule/testinfra/staging/app/test_app_network.py --- a/testinfra/app/test_network.py +++ b/molecule/testinfra/staging/app/test_app_network.py @@ -1,39 +1,40 @@ import os +import io import difflib import pytest from jinja2 import Template +testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars [email protected]() -def test_app_iptables_rules(SystemInfo, Command, Sudo): +def test_app_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison kwargs = dict( mon_ip=os.environ.get('MON_IP', securedrop_test_vars.mon_ip), - default_interface=Command.check_output("ip r | head -n 1 | " - "awk '{ print $5 }'"), - tor_user_id=Command.check_output("id -u debian-tor"), - securedrop_user_id=Command.check_output("id -u www-data"), - ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"), + default_interface=host.check_output("ip r | head -n 1 | " + "awk '{ print $5 }'"), + tor_user_id=host.check_output("id -u debian-tor"), + securedrop_user_id=host.check_output("id -u www-data"), + ssh_group_gid=host.check_output("getent group ssh | cut -d: -f3"), dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters - iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" + iptables = r"iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" environment = os.environ.get("CI_SD_ENV", "staging") iptables_file = "{}/iptables-app-{}.j2".format( os.path.dirname(os.path.abspath(__file__)), environment) # template out a local iptables jinja file - jinja_iptables = Template(open(iptables_file, 'r').read()) + jinja_iptables = Template(io.open(iptables_file, 'r').read()) iptables_expected = 
jinja_iptables.render(**kwargs)

-    with Sudo():
+    with host.sudo():
         # Actually run the iptables scrape command
-        iptables = Command.check_output(iptables)
+        iptables = host.check_output(iptables)
         # print diff comparison (only shows up in pytests if test fails or
         # verbosity turned way up)
         for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'),
diff --git a/molecule/testinfra/staging/app/test_apparmor.py b/molecule/testinfra/staging/app/test_apparmor.py
new file mode 100644
--- /dev/null
+++ b/molecule/testinfra/staging/app/test_apparmor.py
@@ -0,0 +1,132 @@
+import pytest
+
+
+testinfra_hosts = ["app-staging"]
+sdvars = pytest.securedrop_test_vars
+
+
+@pytest.mark.parametrize('pkg', ['apparmor', 'apparmor-utils'])
+def test_apparmor_pkg(host, pkg):
+    """ Apparmor package dependencies """
+    assert host.package(pkg).is_installed
+
+
+def test_apparmor_enabled(host):
+    """ Check that apparmor is enabled """
+    with host.sudo():
+        assert host.run("aa-status --enabled").rc == 0
+
+
+apache2_capabilities = [
+    'dac_override',
+    'kill',
+    'net_bind_service',
+    'sys_ptrace'
+]
+
+
+@pytest.mark.parametrize('cap', apache2_capabilities)
+def test_apparmor_apache_capabilities(host, cap):
+    """ check for exact list of expected app-armor capabilities for apache2 """
+    c = host.run(
+        r"perl -nE '/^\s+capability\s+(\w+),$/ && say $1' /etc/apparmor.d/usr.sbin.apache2"
+    )
+    assert cap in c.stdout
+
+
+def test_apparmor_apache_exact_capabilities(host):
+    """ ensure no extra capabilities are defined for apache2 """
+    c = host.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.apache2")
+    assert str(len(apache2_capabilities)) == c
+
+
+tor_capabilities = ['setgid']
+
+
+@pytest.mark.parametrize('cap', tor_capabilities)
+def test_apparmor_tor_capabilities(host, cap):
+    """ check for exact list of expected app-armor capabilities for tor """
+    c = host.run(r"perl -nE '/^\s+capability\s+(\w+),$/ && say $1' /etc/apparmor.d/usr.sbin.tor")
+    assert cap in c.stdout
+
+
+def test_apparmor_tor_exact_capabilities(host):
+    """ ensure no extra capabilities are defined for tor """
+    c = host.check_output("grep -ic capability "
+                          "/etc/apparmor.d/usr.sbin.tor")
+    assert str(len(tor_capabilities)) == c
+
+
+@pytest.mark.parametrize('profile', [
+    'ntpd',
+    'apache2',
+    'tcpdump',
+    'tor',
+])
+def test_apparmor_ensure_not_disabled(host, profile):
+    """
+    Explicitly check that enforced profiles are NOT in /etc/apparmor.d/disable
+    Polling aa-status only checks the last config that was loaded,
+    this ensures it won't be disabled on reboot.
+    """
+    f = host.file("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile))
+    with host.sudo():
+        assert not f.exists
+
+
+@pytest.mark.parametrize('complain_pkg', sdvars.apparmor_complain)
+def test_app_apparmor_complain(host, complain_pkg):
+    """ Ensure app-armor profiles are in complain mode for staging """
+    with host.sudo():
+        awk = ("awk '/[0-9]+ profiles.*complain."
+               "/{flag=1;next}/^[0-9]+.*/{flag=0}flag'")
+        c = host.check_output("aa-status | {}".format(awk))
+        assert complain_pkg in c
+
+
+def test_app_apparmor_complain_count(host):
+    """ Ensure the right number of app-armor profiles are in complain mode """
+    with host.sudo():
+        c = host.check_output("aa-status --complaining")
+        assert c == str(len(sdvars.apparmor_complain))
+
+
+@pytest.mark.parametrize('aa_enforced', sdvars.apparmor_enforce)
+def test_apparmor_enforced(host, aa_enforced):
+    awk = ("awk '/[0-9]+ profiles.*enforce./"
+           "{flag=1;next}/^[0-9]+.*/{flag=0}flag'")
+    with host.sudo():
+        c = host.check_output("aa-status | {}".format(awk))
+        assert aa_enforced in c
+
+
+def test_apparmor_total_profiles(host):
+    """ Ensure number of total profiles is sum of enforced and
+        complaining profiles """
+    with host.sudo():
+        total_expected = str(len(sdvars.apparmor_enforce) +
+                             len(sdvars.apparmor_complain))
+        # Xenial ships ~20 profiles, so let's expect
+        # *at least* the sum.
+        assert host.check_output("aa-status --profiled") >= total_expected
+
+
+def test_aastatus_unconfined(host):
+    """ Ensure that there are no processes that are unconfined but have
+        a profile """
+
+    # There should be 0 unconfined processes.
+    expected_unconfined = 0
+
+    unconfined_chk = str("{} processes are unconfined but have"
+                         " a profile defined".format(expected_unconfined))
+    with host.sudo():
+        aa_status_output = host.check_output("aa-status")
+        assert unconfined_chk in aa_status_output
+
+
+def test_aa_no_denies_in_syslog(host):
+    """ Ensure that there are no apparmor denials in syslog """
+    with host.sudo():
+        f = host.file("/var/log/syslog")
+        assert 'apparmor="DENIED"' not in f.content_string
diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/staging/app/test_appenv.py
new file mode 100644
--- /dev/null
+++ b/molecule/testinfra/staging/app/test_appenv.py
@@ -0,0 +1,78 @@
+import os.path
+import pytest
+
+testinfra_hosts = ["app-staging"]
+sdvars = pytest.securedrop_test_vars
+
+sdbin = "/opt/venvs/securedrop-app-code/bin"
+
+
+@pytest.mark.parametrize('exp_pip_pkg', sdvars.pip_deps)
+def test_app_pip_deps(host, exp_pip_pkg):
+    """ Ensure pip dependencies are installed """
+    pip = host.pip_package.get_packages(pip_path=os.path.join(sdbin, "pip"))
+    assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version']
+
+
+def test_app_wsgi(host):
+    """ ensure logging is enabled for source interface in staging """
+    f = host.file("/var/www/source.wsgi")
+    with host.sudo():
+        assert f.is_file
+        assert f.mode == 0o640
+        assert f.user == 'www-data'
+        assert f.group == 'www-data'
+        assert f.contains("^import logging$")
+        assert f.contains(r"^logging\.basicConfig\(stream=sys\.stderr\)$")
+
+
+def test_pidfile(host):
+    """ ensure there are no pid files """
+    assert not host.file('/tmp/journalist.pid').exists
+    assert not host.file('/tmp/source.pid').exists
+
+
+@pytest.mark.parametrize('app_dir', sdvars.app_directories)
+def test_app_directories(host, app_dir):
+    """ ensure securedrop app directories exist with correct permissions """
+    f = host.file(app_dir)
+    with host.sudo():
+        assert f.is_directory
+        assert f.user == sdvars.securedrop_user
+        assert f.group == sdvars.securedrop_user
+        assert f.mode == 0o700
+
+
+def test_app_code_pkg(host):
+    """ ensure securedrop-app-code package is installed """
+    assert host.package("securedrop-app-code").is_installed
+
+
+def test_supervisor_not_installed(host):
+    """ ensure supervisor package is not installed """
+    assert host.package("supervisor").is_installed is False
+
+
+def 
test_gpg_key_in_keyring(host): + """ ensure test gpg key is present in app keyring """ + with host.sudo(sdvars.securedrop_user): + c = host.run("gpg --homedir /var/lib/securedrop/keys " + "--list-keys 28271441") + assert "pub 4096R/28271441 2013-10-12" in c.stdout + + +def test_ensure_logo(host): + """ ensure default logo header file exists """ + f = host.file("{}/static/i/logo.png".format(sdvars.securedrop_code)) + with host.sudo(): + assert f.mode == 0o644 + assert f.user == sdvars.securedrop_user + assert f.group == sdvars.securedrop_user + + +def test_securedrop_tmp_clean_cron(host): + """ Ensure securedrop tmp clean cron job in place """ + with host.sudo(): + cronlist = host.run("crontab -l").stdout + cronjob = "@daily {}/manage.py clean-tmp".format(sdvars.securedrop_code) + assert cronjob in cronlist diff --git a/testinfra/app/test_ossec.py b/molecule/testinfra/staging/app/test_ossec_agent.py similarity index 65% rename from testinfra/app/test_ossec.py rename to molecule/testinfra/staging/app/test_ossec_agent.py --- a/testinfra/app/test_ossec.py +++ b/molecule/testinfra/staging/app/test_ossec_agent.py @@ -6,42 +6,42 @@ testinfra_hosts = ["app", "app-staging"] -def test_hosts_files(File, SystemInfo): +def test_hosts_files(host): """ Ensure host files mapping are in place """ - f = File('/etc/hosts') + f = host.file('/etc/hosts') mon_ip = os.environ.get('MON_IP', sdvars.mon_ip) mon_host = sdvars.monitor_hostname - assert f.contains('^127.0.0.1\s*localhost') - assert f.contains('^{}\s*{}\s*securedrop-monitor-server-alias$'.format( + assert f.contains(r'^127.0.0.1\s*localhost') + assert f.contains(r'^{}\s*{}\s*securedrop-monitor-server-alias$'.format( mon_ip, mon_host)) -def test_hosts_duplicate(Command): +def test_hosts_duplicate(host): """ Regression test for duplicate entries """ - assert Command.check_output("uniq --repeated /etc/hosts") == "" + assert host.check_output("uniq --repeated /etc/hosts") == "" -def test_ossec_agent_installed(Package): +def test_ossec_agent_installed(host): """ Check that ossec-agent package is present """ - assert Package("securedrop-ossec-agent").is_installed + assert host.package("securedrop-ossec-agent").is_installed # Permissions don't match between Ansible and OSSEC deb packages postinst. @pytest.mark.xfail -def test_ossec_keyfile_present(File, Command, Sudo, SystemInfo): +def test_ossec_keyfile_present(host): """ ensure client keyfile for ossec-agent is present """ pattern = "^1024 {} {} [0-9a-f]{{64}}$".format( sdvars.app_hostname, os.environ.get('APP_IP', sdvars.app_ip)) regex = re.compile(pattern) - with Sudo(): - f = File("/var/ossec/etc/client.keys") + with host.sudo(): + f = host.file("/var/ossec/etc/client.keys") assert f.exists - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.user == "root" assert f.group == "ossec" assert f.content_string diff --git a/molecule/testinfra/staging/app/test_paxctld.py b/molecule/testinfra/staging/app/test_paxctld.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app/test_paxctld.py @@ -0,0 +1,40 @@ +import pytest +import re + + +testinfra_hosts = ["app-staging"] +securedrop_test_vars = pytest.securedrop_test_vars + + +def test_paxctld_installed(host): + """ + Ensure the paxctld package is installed. + """ + # Only relevant to Xenial installs + if host.system_info.codename == "xenial": + pkg = host.package("paxctld") + assert pkg.is_installed + + +def test_paxctld_config(host): + """ + Ensure the relevant binaries have appropriate flags set in paxctld config. 
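+    The single "m" flag disables MPROTECT for the listed binary
+    (approximate paxctl semantics); Apache is expected to need that
+    relaxation to run under the grsec kernel.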
+ """ + f = host.file("/etc/paxctld.conf") + + # Only relevant to Xenial installs + if host.system_info.codename == "xenial": + assert f.is_file + regex = r"^/usr/sbin/apache2\s+m$" + assert re.search(regex, f.content_string, re.M) + + +def test_paxctld_service(host): + """ + Ensure the paxctld service is enabled and running. + """ + # Only relevant to Xenial installs + if host.system_info.codename == "xenial": + s = host.service("paxctld") + assert s.is_running + assert s.is_enabled diff --git a/testinfra/common/test_tor_config.py b/molecule/testinfra/staging/app/test_tor_config.py similarity index 51% rename from testinfra/common/test_tor_config.py rename to molecule/testinfra/staging/app/test_tor_config.py --- a/testinfra/common/test_tor_config.py +++ b/molecule/testinfra/staging/app/test_tor_config.py @@ -1,43 +1,31 @@ import pytest +import re +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars @pytest.mark.parametrize('package', [ 'tor', ]) -def test_tor_packages(Package, package): +def test_tor_packages(host, package): """ Ensure Tor packages are installed. Does not include the Tor keyring package, since we want only the SecureDrop Release Signing Key to be used even for Tor packages. """ - assert Package(package).is_installed + assert host.package(package).is_installed -def test_tor_service_running(Command, File, Sudo): +def test_tor_service_running(host): """ Ensure tor is running and enabled. Tor is required for SSH access, - so it must be enabled to start on boot. + so it must be enabled to start on boot. Checks systemd-style services, + used by Xenial. """ - # TestInfra tries determine the service manager intelligently, and - # inappropriately assumes Upstart on Trusty, due to presence of the - # `initctl` command. The tor service is handled via a SysV-style init - # script, so let's just shell out and verify the running and enabled - # states explicitly. - with Sudo(): - assert Command.check_output("service tor status") == \ - " * tor is running" - tor_enabled = Command.check_output("find /etc/rc?.d -name S??tor") - - assert tor_enabled != "" - - tor_targets = tor_enabled.split("\n") - assert len(tor_targets) == 4 - for target in tor_targets: - t = File(target) - assert t.is_symlink - assert t.linked_to == "/etc/init.d/tor" + s = host.service("tor") + assert s.is_running + assert s.is_enabled @pytest.mark.parametrize('torrc_option', [ @@ -45,29 +33,49 @@ def test_tor_service_running(Command, File, Sudo): 'SafeLogging 1', 'RunAsDaemon 1', ]) -def test_tor_torrc_options(File, torrc_option): +def test_tor_torrc_options(host, torrc_option): """ Check for required options in the system Tor config file. These options should be present regardless of machine role, meaning both Application and Monitor server will have them. - Separate tests will check for specific hidden services. + Separate tests will check for specific Onion Services. """ - f = File("/etc/tor/torrc") + f = host.file("/etc/tor/torrc") assert f.is_file assert f.user == "debian-tor" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains("^{}$".format(torrc_option)) -def test_tor_torrc_sandbox(File): +def test_tor_torrc_sandbox(host): """ Check that the `Sandbox 1` declaration is not present in the torrc. The torrc manpage states this option is experimental, and although we use it already on Tails workstations, further testing is required before we push it out to servers. See issues #944 and #1969. 
""" - f = File("/etc/tor/torrc") + f = host.file("/etc/tor/torrc") # Only `Sandbox 1` will enable, but make sure there are zero occurrances # of "Sandbox", otherwise we may have a regression somewhere. assert not f.contains("^.*Sandbox.*$") + + +def test_tor_v2_onion_url_readable_by_app(host): + v2_url_filepath = "/var/lib/securedrop/source_v2_url" + with host.sudo(): + f = host.file(v2_url_filepath) + assert f.is_file + assert f.user == "www-data" + assert f.mode == 0o644 + assert re.search(r"^[a-z0-9]{16}\.onion$", f.content_string) + + +def test_tor_v3_onion_url_readable_by_app(host): + v3_url_filepath = "/var/lib/securedrop/source_v3_url" + with host.sudo(): + f = host.file(v3_url_filepath) + assert f.is_file + assert f.user == "www-data" + assert f.mode == 0o644 + assert re.search(r"^[a-z0-9]{56}\.onion$", f.content_string) diff --git a/testinfra/common/test_tor_hidden_services.py b/molecule/testinfra/staging/app/test_tor_hidden_services.py similarity index 52% rename from testinfra/common/test_tor_hidden_services.py rename to molecule/testinfra/staging/app/test_tor_hidden_services.py --- a/testinfra/common/test_tor_hidden_services.py +++ b/molecule/testinfra/staging/app/test_tor_hidden_services.py @@ -2,73 +2,82 @@ import re +testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars @pytest.mark.parametrize('tor_service', sdvars.tor_services) -def test_tor_service_directories(File, Sudo, tor_service): +def test_tor_service_directories(host, tor_service): """ Check mode and ownership on Tor service directories. """ - with Sudo(): - f = File("/var/lib/tor/services/{}".format(tor_service['name'])) + with host.sudo(): + f = host.file("/var/lib/tor/services/{}".format(tor_service['name'])) assert f.is_directory - # TODO: tor might mark these dirs as setgid - assert oct(f.mode) == "0700" + assert f.mode == 0o700 assert f.user == "debian-tor" assert f.group == "debian-tor" @pytest.mark.parametrize('tor_service', sdvars.tor_services) -def test_tor_service_hostnames(File, Sudo, tor_service): +def test_tor_service_hostnames(host, tor_service): """ - Check contents of tor service hostname file. For normal Hidden Services, + Check contents of tor service hostname file. For normal Onion Services, the file should contain only hostname (.onion URL). For Authenticated - Hidden Services, it should also contain the HidServAuth cookie. + Onion Services, it should also contain the HidServAuth cookie. """ - # Declare regex only for THS; we'll build regex for ATHS only if # necessary, since we won't have the required values otherwise. - ths_hostname_regex = "[a-z0-9]{16}\.onion" + ths_hostname_regex = r"[a-z0-9]{16}\.onion" + ths_hostname_regex_v3 = r"[a-z0-9]{56}\.onion" - with Sudo(): - f = File("/var/lib/tor/services/{}/hostname".format( + with host.sudo(): + f = host.file("/var/lib/tor/services/{}/hostname".format( tor_service['name'])) assert f.is_file - assert oct(f.mode) == "0600" + assert f.mode == 0o600 assert f.user == "debian-tor" assert f.group == "debian-tor" # All hostnames should contain at *least* the hostname. - assert re.search(ths_hostname_regex, f.content) + assert re.search(ths_hostname_regex, f.content_string) - if tor_service['authenticated']: + if tor_service['authenticated'] and tor_service['version'] == 2: # HidServAuth regex is approximately [a-zA-Z0-9/+], but validating # the entire entry is sane, and we don't need to nitpick the # charset. 
aths_hostname_regex = ths_hostname_regex + " .{22} # client: " + \ tor_service['client'] - assert re.search("^{}$".format(aths_hostname_regex), f.content) + assert re.search("^{}$".format(aths_hostname_regex), f.content_string) + elif tor_service['authenticated'] and tor_service['version'] == 3: + # For authenticated version 3 onion services, the authorized_client + # directory will exist and contain a file called client.auth. + client_auth = host.file( + "/var/lib/tor/services/{}/authorized_clients/client.auth".format( + tor_service['name'])) + assert client_auth.is_file + elif tor_service['version'] == 2: + assert re.search("^{}$".format(ths_hostname_regex), f.content_string) else: - assert re.search("^{}$".format(ths_hostname_regex), f.content) + assert re.search("^{}$".format(ths_hostname_regex_v3), f.content_string) @pytest.mark.parametrize('tor_service', sdvars.tor_services) -def test_tor_services_config(File, tor_service): +def test_tor_services_config(host, tor_service): """ Ensure torrc file contains relevant lines for Hidden Service declarations. - All hidden services must include: + All Onion Services must include: * HiddenServiceDir * HiddenServicePort - Only authenticated hidden services must also include: + Only v2 authenticated Onion Services must also include: * HiddenServiceAuthorizeClient Check for each as appropriate. """ - f = File("/etc/tor/torrc") + f = host.file("/etc/tor/torrc") dir_regex = "HiddenServiceDir /var/lib/tor/services/{}".format( tor_service['name']) # We need at least one port, but it may be used for both config values. @@ -80,13 +89,29 @@ def test_tor_services_config(File, tor_service): except IndexError: local_port = remote_port + # Ensure that service is hardcoded to v2, for compatibility + # with newer versions of Tor, which default to v3. + if tor_service['version'] == 2: + version_string = "HiddenServiceVersion 2" + else: + version_string = "" + port_regex = "HiddenServicePort {} 127.0.0.1:{}".format( remote_port, local_port) assert f.contains("^{}$".format(dir_regex)) assert f.contains("^{}$".format(port_regex)) - if tor_service['authenticated']: + if version_string: + service_regex = "\n".join([dir_regex, version_string, port_regex]) + else: + service_regex = "\n".join([dir_regex, port_regex]) + + if tor_service['authenticated'] and tor_service['version'] == 2: auth_regex = "HiddenServiceAuthorizeClient stealth {}".format( tor_service['client']) assert f.contains("^{}$".format(auth_regex)) + service_regex += "\n{}".format(auth_regex) + + # Check for block in file, to ensure declaration order + assert service_regex in f.content_string diff --git a/testinfra/common/test_cron_apt.py b/molecule/testinfra/staging/common/test_cron_apt.py similarity index 63% rename from testinfra/common/test_cron_apt.py rename to molecule/testinfra/staging/common/test_cron_apt.py --- a/testinfra/common/test_cron_apt.py +++ b/molecule/testinfra/staging/common/test_cron_apt.py @@ -2,11 +2,14 @@ import re +test_vars = pytest.securedrop_test_vars + + @pytest.mark.parametrize('dependency', [ 'cron-apt', 'ntp' ]) -def test_cron_apt_dependencies(Package, dependency): +def test_cron_apt_dependencies(host, dependency): """ Ensure critical packages are installed. If any of these are missing, the system will fail to receive automatic updates. @@ -17,65 +20,83 @@ def test_cron_apt_dependencies(Package, dependency): problematic. With better procedures in place regarding apt repo maintenance, we can ensure the field is populated going forward. 
""" - assert Package(dependency).is_installed + assert host.package(dependency).is_installed -def test_cron_apt_config(File): +def test_cron_apt_config(host): """ Ensure custom cron-apt config file is present. """ - f = File('/etc/cron-apt/config') + f = host.file('/etc/cron-apt/config') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains('^SYSLOGON="always"$') assert f.contains('^EXITON=error$') @pytest.mark.parametrize('repo', [ - 'deb http://security.ubuntu.com/ubuntu trusty-security main', - 'deb-src http://security.ubuntu.com/ubuntu trusty-security main', - 'deb http://security.ubuntu.com/ubuntu trusty-security universe', - 'deb-src http://security.ubuntu.com/ubuntu trusty-security universe', - 'deb [arch=amd64] https://apt.freedom.press trusty main', - 'deb https://tor-apt.freedom.press trusty main', + 'deb http://security.ubuntu.com/ubuntu {securedrop_target_platform}-security main', + 'deb-src http://security.ubuntu.com/ubuntu {securedrop_target_platform}-security main', + 'deb http://security.ubuntu.com/ubuntu {securedrop_target_platform}-security universe', + 'deb-src http://security.ubuntu.com/ubuntu {securedrop_target_platform}-security universe', + 'deb [arch=amd64] {fpf_apt_repo_url} {securedrop_target_platform} main', ]) -def test_cron_apt_repo_list(File, repo): +def test_cron_apt_repo_list(host, repo): """ Ensure the correct apt repositories are specified in the security list for apt. """ - f = File('/etc/apt/security.list') + repo_config = repo.format( + fpf_apt_repo_url=test_vars.fpf_apt_repo_url, + securedrop_target_platform=host.system_info.codename + ) + f = host.file('/etc/apt/security.list') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" - repo_regex = '^{}$'.format(re.escape(repo)) + assert f.mode == 0o644 + repo_regex = '^{}$'.format(re.escape(repo_config)) assert f.contains(repo_regex) -def test_cron_apt_repo_config_update(File): +def test_cron_apt_repo_config_update(host): """ Ensure cron-apt updates repos from the security.list config. """ - f = File('/etc/cron-apt/action.d/0-update') + f = host.file('/etc/cron-apt/action.d/0-update') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 repo_config = str('update -o quiet=2' ' -o Dir::Etc::SourceList=/etc/apt/security.list' ' -o Dir::Etc::SourceParts=""') assert f.contains('^{}$'.format(repo_config)) -def test_cron_apt_repo_config_upgrade(File): +def test_cron_apt_delete_vanilla_kernels(host): + """ + Ensure cron-apt removes generic linux image packages when installed. + """ + + f = host.file('/etc/cron-apt/action.d/9-remove') + assert f.is_file + assert f.user == "root" + assert f.mode == 0o644 + command = str('remove -y' + ' linux-image-generic-lts-xenial linux-image-.*generic' + ' -o quiet=2') + assert f.contains('^{}$'.format(command)) + + +def test_cron_apt_repo_config_upgrade(host): """ Ensure cron-apt upgrades packages from the security.list config. 
""" - f = File('/etc/cron-apt/action.d/5-security') + f = host.file('/etc/cron-apt/action.d/5-security') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 assert f.contains('^autoclean -y$') repo_config = str('dist-upgrade -y -o APT::Get::Show-Upgraded=true' ' -o Dir::Etc::SourceList=/etc/apt/security.list' @@ -84,11 +105,11 @@ def test_cron_apt_repo_config_upgrade(File): assert f.contains(re.escape(repo_config)) -def test_cron_apt_config_deprecated(File): +def test_cron_apt_config_deprecated(host): """ Ensure default cron-apt file to download all updates does not exist. """ - f = File('/etc/cron-apt/action.d/3-download') + f = host.file('/etc/cron-apt/action.d/3-download') assert not f.exists @@ -100,16 +121,16 @@ def test_cron_apt_config_deprecated(File): {'job': '0 5 * * * root /sbin/reboot', 'state': 'absent'}, ]) -def test_cron_apt_cron_jobs(File, cron_job): +def test_cron_apt_cron_jobs(host, cron_job): """ Check for correct cron job for upgrading all packages and rebooting. We'll also check for absence of previous versions of the cron job, to make sure those have been cleaned up via the playbooks. """ - f = File('/etc/cron.d/cron-apt') + f = host.file('/etc/cron.d/cron-apt') assert f.is_file assert f.user == "root" - assert oct(f.mode) == "0644" + assert f.mode == 0o644 regex_job = '^{}$'.format(re.escape(cron_job['job'])) if cron_job['state'] == 'present': @@ -118,7 +139,7 @@ def test_cron_apt_cron_jobs(File, cron_job): assert not f.contains(regex_job) -def test_cron_apt_all_packages_updated(Command): +def test_cron_apt_all_packages_updated(host): """ Ensure a safe-upgrade has already been run, by checking that no packages are eligible for upgrade currently. @@ -127,7 +148,7 @@ def test_cron_apt_all_packages_updated(Command): for use with Selenium. Therefore apt will report it's possible to upgrade Firefox, which we'll need to mark as "OK" in terms of the tests. """ - c = Command('aptitude --simulate -y safe-upgrade') + c = host.run('aptitude --simulate -y safe-upgrade') assert c.rc == 0 # Staging hosts will have locally built deb packages, marked as held. # Staging and development will have a version-locked Firefox pinned for diff --git a/testinfra/common/test_fpf_apt_repo.py b/molecule/testinfra/staging/common/test_fpf_apt_repo.py similarity index 65% rename from testinfra/common/test_fpf_apt_repo.py rename to molecule/testinfra/staging/common/test_fpf_apt_repo.py --- a/testinfra/common/test_fpf_apt_repo.py +++ b/molecule/testinfra/staging/common/test_fpf_apt_repo.py @@ -1,7 +1,11 @@ import pytest +import re -def test_fpf_apt_repo_present(File): +test_vars = pytest.securedrop_test_vars + + +def test_fpf_apt_repo_present(host): """ Ensure the FPF apt repo, apt.freedom.press, is configured. This repository is necessary for the SecureDrop Debian packages, @@ -15,12 +19,20 @@ def test_fpf_apt_repo_present(File): installed, e.g. for OSSEC. Install state for those packages is tested separately. 
""" - f = File('/etc/apt/sources.list.d/apt_freedom_press.list') - assert f.contains('^deb \[arch=amd64\] https:\/\/apt\.freedom\.press ' - 'trusty main$') + + # If the var fpf_apt_repo_url test var is apt-test, validate that the + # apt repository is configured on the host + if test_vars.fpf_apt_repo_url == "https://apt-test.freedom.press": + f = host.file('/etc/apt/sources.list.d/apt_test_freedom_press.list') + else: + f = host.file('/etc/apt/sources.list.d/apt_freedom_press.list') + repo_regex = r'^deb \[arch=amd64\] {} {} main$'.format( + re.escape(test_vars.fpf_apt_repo_url), + re.escape(host.system_info.codename)) + assert f.contains(repo_regex) -def test_fpf_apt_repo_fingerprint(Command): +def test_fpf_apt_repo_fingerprint(host): """ Ensure the FPF apt repo has the correct fingerprint on the associated signing pubkey. The key changed in October 2016, so test for the @@ -28,11 +40,11 @@ def test_fpf_apt_repo_fingerprint(Command): `securedrop-keyring` package. """ - c = Command('apt-key finger') + c = host.run('apt-key finger') fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg --------------------------------------------- -pub 4096R/00F4AD77 2016-10-20 [expires: 2018-10-05] +pub 4096R/00F4AD77 2016-10-20 [expires: 2020-06-30] Key fingerprint = 2224 5C81 E3BA EB41 38B3 6061 310F 5612 00F4 AD77 uid SecureDrop Release Signing Key""" @@ -47,12 +59,12 @@ def test_fpf_apt_repo_fingerprint(Command): 'uid Freedom of the Press Foundation Master Signing Key', 'B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818', ]) -def test_fpf_apt_repo_old_pubkeys_absent(Command, old_pubkey): +def test_fpf_apt_repo_old_pubkeys_absent(host, old_pubkey): """ Ensure that expired (or about-to-expire) public keys for the FPF apt repo are NOT present. Updates to the securedrop-keyring package should enforce clobbering of old pubkeys, and this check will confirm absence. """ - c = Command('apt-key finger') + c = host.run('apt-key finger') assert old_pubkey not in c.stdout diff --git a/testinfra/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py similarity index 57% rename from testinfra/common/test_grsecurity.py rename to molecule/testinfra/staging/common/test_grsecurity.py --- a/testinfra/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -1,35 +1,34 @@ import pytest -import os import re -def test_ssh_motd_disabled(File): +KERNEL_VERSION = pytest.securedrop_test_vars.grsec_version + + +def test_ssh_motd_disabled(host): """ Ensure the SSH MOTD (Message of the Day) is disabled. Grsecurity balks at Ubuntu's default MOTD. """ - f = File("/etc/pam.d/sshd") + f = host.file("/etc/pam.d/sshd") assert f.is_file - assert not f.contains("pam\.motd") + assert not f.contains(r"pam\.motd") [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize("package", [ + 'linux-image-{}-grsec-securedrop'.format(KERNEL_VERSION), 'paxctl', 'securedrop-grsec', ]) -def test_grsecurity_apt_packages(Package, package): +def test_grsecurity_apt_packages(host, package): """ Ensure the grsecurity-related apt packages are present on the system. Includes the FPF-maintained metapackage, as well as paxctl, for managing PaX flags on binaries. 
""" - assert Package(package).is_installed + assert host.package(package).is_installed [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize("package", [ 'linux-signed-image-generic-lts-utopic', 'linux-signed-image-generic', @@ -38,7 +37,7 @@ def test_grsecurity_apt_packages(Package, package): '^linux-image-.*generic$', '^linux-headers-.*', ]) -def test_generic_kernels_absent(Command, package): +def test_generic_kernels_absent(host, package): """ Ensure the default Ubuntu-provided kernel packages are absent. In the past, conflicting version numbers have caused machines @@ -49,54 +48,46 @@ def test_generic_kernels_absent(Command, package): # Can't use the TestInfra Package module to check state=absent, # so let's check by shelling out to `dpkg -l`. Dpkg will automatically # honor simple regex in package names. - c = Command("dpkg -l {}".format(package)) + c = host.run("dpkg -l {}".format(package)) assert c.rc == 1 error_text = "dpkg-query: no packages found matching {}".format(package) - assert c.stderr == error_text + assert error_text in c.stderr.strip() [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") -def test_grsecurity_lock_file(File): +def test_grsecurity_lock_file(host): """ Ensure system is rerunning a grsecurity kernel by testing for the `grsec_lock` file, which is automatically created by grsecurity. """ - f = File("/proc/sys/kernel/grsecurity/grsec_lock") - assert oct(f.mode) == "0600" + f = host.file("/proc/sys/kernel/grsecurity/grsec_lock") + assert f.mode == 0o600 assert f.user == "root" assert f.size == 0 [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") -def test_grsecurity_kernel_is_running(Command): +def test_grsecurity_kernel_is_running(host): """ Make sure the currently running kernel is specific grsec kernel. """ - c = Command('uname -r') - assert c.stdout.endswith('-grsec') - assert c.stdout == '3.14.79-grsec' + c = host.run('uname -r') + assert c.stdout.strip().endswith('-grsec-securedrop') + assert c.stdout.strip() == '{}-grsec-securedrop'.format(KERNEL_VERSION) [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize('sysctl_opt', [ ('kernel.grsecurity.grsec_lock', 1), ('kernel.grsecurity.rwxmap_logging', 0), ('vm.heap_stack_gap', 1048576), ]) -def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt): +def test_grsecurity_sysctl_options(host, sysctl_opt): """ Check that the grsecurity-related sysctl options are set correctly. In production the RWX logging is disabled, to reduce log noise. """ - with Sudo(): - assert Sysctl(sysctl_opt[0]) == sysctl_opt[1] + with host.sudo(): + assert host.sysctl(sysctl_opt[0]) == sysctl_opt[1] [email protected](os.environ.get('FPF_GRSEC', 'true') == "false", - reason="Need to skip in environment w/o grsec") @pytest.mark.parametrize('paxtest_check', [ "Executable anonymous mapping", "Executable bss", @@ -116,52 +107,47 @@ def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt): "Return to function (memcpy)", "Return to function (memcpy, PIE)", ]) -def test_grsecurity_paxtest(Command, Sudo, paxtest_check): +def test_grsecurity_paxtest(host, paxtest_check): """ Check that paxtest does not report anything vulnerable Requires the package paxtest to be installed. 
The paxtest package is currently being installed in the app-test role.
    """
-    if Command.exists("/usr/bin/paxtest"):
-        with Sudo():
-            c = Command("paxtest blackhat")
+    if host.exists("/usr/bin/paxtest"):
+        with host.sudo():
+            c = host.run("paxtest blackhat")

            assert c.rc == 0
            assert "Vulnerable" not in c.stdout
-            regex = "^{}\s*:\sKilled$".format(re.escape(paxtest_check))
+            regex = r"^{}\s*:\sKilled$".format(re.escape(paxtest_check))
            assert re.search(regex, c.stdout)


-@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "true",
-                    reason="Not needed in CI environment")
-def test_grub_pc_marked_manual(Command):
+def test_grub_pc_marked_manual(host):
    """
    Ensure the `grub-pc` package is marked as manually installed.
    This is necessary for VirtualBox with Vagrant.
    """
-    c = Command('apt-mark showmanual grub-pc')
+    c = host.run('apt-mark showmanual grub-pc')
    assert c.rc == 0
-    assert c.stdout == "grub-pc"
+    assert c.stdout.strip() == "grub-pc"


-@pytest.mark.skipif(os.environ.get('FPF_GRSEC', 'true') == "false",
-                    reason="Need to skip in environment w/o grsec")
-def test_apt_autoremove(Command):
+def test_apt_autoremove(host):
    """
    Ensure old packages have been autoremoved.
    """
-    c = Command('apt-get --dry-run autoremove')
+    c = host.run('apt-get --dry-run autoremove')
    assert c.rc == 0
    assert "The following packages will be REMOVED" not in c.stdout


-@pytest.mark.skipif(os.environ.get('FPF_GRSEC', 'true') == "false",
-                    reason="Need to skip in environment w/o grsec")
+@pytest.mark.skip(reason="PaX flags unset at install time, see issue #3916")
@pytest.mark.parametrize("binary", [
    "/usr/sbin/grub-probe",
    "/usr/sbin/grub-mkdevicemap",
    "/usr/bin/grub-script-check",
])
-def test_pax_flags(Command, File, binary):
+def test_pax_flags(host, binary):
    """
    Ensure PaX flags are set correctly on critical Grub binaries.
    These flags are maintained as part of a post-install kernel hook
@@ -169,11 +155,11 @@ def test_pax_flags(Command, File, binary):
    the machine may fail to boot into a new kernel.
    """

-    f = File("/etc/kernel/postinst.d/paxctl-grub")
+    f = host.file("/etc/kernel/postinst.d/paxctl-grub")
    assert f.is_file
    assert f.contains("^paxctl -zCE {}".format(binary))

-    c = Command("paxctl -v {}".format(binary))
+    c = host.run("paxctl -v {}".format(binary))
    assert c.rc == 0

    assert "- PaX flags: --------E--- [{}]".format(binary) in c.stdout
@@ -182,3 +168,55 @@
    # the "p" and "m" flags.
    assert "PAGEEXEC is disabled" not in c.stdout
    assert "MPROTECT is disabled" not in c.stdout
+
+
+@pytest.mark.parametrize('kernel_opts', [
+    'WLAN',
+    'NFC',
+    'WIMAX',
+    'WIRELESS',
+    'HAMRADIO',
+    'IRDA',
+    'BT',
+])
+def test_wireless_disabled_in_kernel_config(host, kernel_opts):
+    """
+    Kernel modules for wireless are blacklisted, but we go one step further and
+    remove wireless support from the kernel. Let's make sure wireless is
+    disabled in the running kernel config!
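+    Kconfig writes disabled options as a literal "# CONFIG_<name> is not set"
+    comment, which is the exact line format asserted below.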
+    """
+
+    kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION)
+    kernel_config = host.file(kernel_config_path).content_string
+
+    line = "# CONFIG_{} is not set".format(kernel_opts)
+    assert line in kernel_config
+
+
+@pytest.mark.parametrize('kernel_opts', [
+    'CONFIG_X86_INTEL_TSX_MODE_OFF',
+    'CONFIG_PAX',
+    'CONFIG_GRKERNSEC',
+])
+def test_kernel_options_enabled_config(host, kernel_opts):
+    """
+    Tests kernel config for options that should be enabled
+    """
+
+    kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION)
+    kernel_config = host.file(kernel_config_path).content_string
+
+    line = "{}=y".format(kernel_opts)
+    assert line in kernel_config
+
+
+def test_mds_mitigations_and_smt_disabled(host):
+    """
+    Ensure that full mitigations are in place for MDS
+    see https://www.kernel.org/doc/html/latest/admin-guide/hw-vuln/mds.html
+    """
+
+    grub_config_path = "/boot/grub/grub.cfg"
+    grub_config = host.file(grub_config_path)
+
+    assert grub_config.contains("mds=full,nosmt")
diff --git a/testinfra/common/test_ip6tables.py b/molecule/testinfra/staging/common/test_ip6tables.py
similarity index 64%
rename from testinfra/common/test_ip6tables.py
rename to molecule/testinfra/staging/common/test_ip6tables.py
--- a/testinfra/common/test_ip6tables.py
+++ b/molecule/testinfra/staging/common/test_ip6tables.py
@@ -1,8 +1,4 @@
-import pytest
-
-
[email protected]()
-def test_ip6tables_drop_everything(Command, Sudo):
+def test_ip6tables_drop_everything(host):
    """
    Ensure that all IPv6 packets are dropped by default.
    The IPv4 rules are more complicated, and tested separately.
@@ -13,6 +9,6 @@ def test_ip6tables_drop_everything(Command, Sudo):
    -P OUTPUT DROP
    """.lstrip().rstrip()

-    with Sudo():
-        c = Command.check_output("ip6tables -S")
+    with host.sudo():
+        c = host.check_output("ip6tables -S")
        assert c == desired_ip6tables_output
diff --git a/testinfra/common/test_platform.py b/molecule/testinfra/staging/common/test_platform.py
similarity index 53%
rename from testinfra/common/test_platform.py
rename to molecule/testinfra/staging/common/test_platform.py
--- a/testinfra/common/test_platform.py
+++ b/molecule/testinfra/staging/common/test_platform.py
@@ -1,3 +1,8 @@
+# We expect Ubuntu Xenial
+SUPPORTED_CODENAMES = ('xenial',)
+SUPPORTED_RELEASES = ('16.04',)
+
+
 def test_ansible_version(host):
    """
    Check that a supported version of Ansible is being used.
@@ -11,14 +16,11 @@
    assert c.startswith("ansible 2.")


-def test_platform(SystemInfo):
+def test_platform(host):
    """
-    SecureDrop requires Ubuntu Trusty 14.04 LTS. The shelf life
-    of that release means we'll need to migrate to Xenial LTS
-    at some point; until then, require hosts to be running
-    Ubuntu.
+    SecureDrop requires Ubuntu 16.04 LTS.
""" - assert SystemInfo.type == "linux" - assert SystemInfo.distribution == "ubuntu" - assert SystemInfo.codename == "trusty" - assert SystemInfo.release == "14.04" + assert host.system_info.type == "linux" + assert host.system_info.distribution == "ubuntu" + assert host.system_info.codename in SUPPORTED_CODENAMES + assert host.system_info.release in SUPPORTED_RELEASES diff --git a/molecule/testinfra/staging/common/test_release_upgrades.py b/molecule/testinfra/staging/common/test_release_upgrades.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/common/test_release_upgrades.py @@ -0,0 +1,26 @@ +def test_release_manager_upgrade_channel(host): + """ + Ensures that the `do-release-upgrade` command will not + suggest upgrades from Xenial to Bionic (which is untested + and unsupported.) + """ + expected_channels = { + "xenial": "never", + } + + config_path = "/etc/update-manager/release-upgrades" + assert host.file(config_path).is_file + + raw_output = host.check_output("grep '^Prompt' {}".format(config_path)) + _, channel = raw_output.split("=") + + expected_channel = expected_channels[host.system_info.codename] + assert channel == expected_channel + + +def test_do_release_upgrade_is_installed(host): + """ + Ensure the `do-release-upgrade` command is present on target systems, + so that instance Admins can upgrade from Trusty to Xenial. + """ + assert host.exists("do-release-upgrade") diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/staging/common/test_system_hardening.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/common/test_system_hardening.py @@ -0,0 +1,143 @@ +import pytest +import re + +testinfra_hosts = ["app", "app-staging", "mon", "mon-staging"] + + [email protected]('sysctl_opt', [ + ('net.ipv4.conf.all.accept_redirects', 0), + ('net.ipv4.conf.all.accept_source_route', 0), + ('net.ipv4.conf.all.rp_filter', 1), + ('net.ipv4.conf.all.secure_redirects', 0), + ('net.ipv4.conf.all.send_redirects', 0), + ('net.ipv4.conf.default.accept_redirects', 0), + ('net.ipv4.conf.default.accept_source_route', 0), + ('net.ipv4.conf.default.rp_filter', 1), + ('net.ipv4.conf.default.secure_redirects', 0), + ('net.ipv4.conf.default.send_redirects', 0), + ('net.ipv4.icmp_echo_ignore_broadcasts', 1), + ('net.ipv4.ip_forward', 0), + ('net.ipv4.tcp_max_syn_backlog', 4096), + ('net.ipv4.tcp_syncookies', 1), + ('net.ipv6.conf.all.disable_ipv6', 1), + ('net.ipv6.conf.default.disable_ipv6', 1), + ('net.ipv6.conf.lo.disable_ipv6', 1), +]) +def test_sysctl_options(host, sysctl_opt): + """ + Ensure sysctl flags are set correctly. Most of these checks + are disabling IPv6 and hardening IPv4, which is appropriate + due to the heavy use of Tor. + """ + with host.sudo(): + assert host.sysctl(sysctl_opt[0]) == sysctl_opt[1] + + +def test_dns_setting(host): + """ + Ensure DNS service is hard-coded in resolv.conf config. + """ + f = host.file('/etc/resolvconf/resolv.conf.d/base') + assert f.is_file + assert f.user == "root" + assert f.group == "root" + assert f.mode == 0o644 + assert f.contains(r'^nameserver 8\.8\.8\.8$') + + [email protected]('kernel_module', [ + 'bluetooth', + 'iwlwifi', +]) +def test_blacklisted_kernel_modules(host, kernel_module): + """ + Test that unwanted kernel modules are blacklisted on the system. + Mostly these checks are defense-in-depth approaches to ensuring + that wireless interfaces will not work. 
+    """
+    with host.sudo():
+        c = host.run("lsmod")
+        assert kernel_module not in c.stdout
+
+    f = host.file("/etc/modprobe.d/blacklist.conf")
+    assert f.contains("^blacklist {}$".format(kernel_module))
+
+
+def test_swap_disabled(host):
+    """
+    Ensure swap space is disabled. Prohibit writing memory to swapfiles
+    to reduce the threat of forensic analysis leaking any sensitive info.
+    """
+    hostname = host.check_output('hostname')
+
+    # Mon doesn't have swap disabled yet
+    if hostname.startswith('mon'):
+        return True
+
+    c = host.check_output('swapon --summary')
+    # A leading slash will indicate full path to a swapfile.
+    assert not re.search("^/", c, re.M)
+
+    # On Xenial, swapon 2.27.1 shows blank output, with no headers, so
+    # check for empty output as confirmation of no swap.
+    rgx = re.compile("^$")
+
+    assert re.search(rgx, c)
+
+
+def test_twofactor_disabled_on_tty(host):
+    """
+    Having 2FA on TTY logins is cumbersome on systems without encrypted drives.
+    Let's make sure this option is disabled!
+    """
+
+    pam_auth_file = host.file("/etc/pam.d/common-auth").content_string
+
+    assert "auth required pam_google_authenticator.so" not in pam_auth_file
+    assert "pam_ecryptfs.so unwrap" not in pam_auth_file
+
+
+@pytest.mark.parametrize('sshd_opts', [
+    ('UsePAM', 'no'),
+    ('ChallengeResponseAuthentication', 'no'),
+    ('PasswordAuthentication', 'no'),
+    ('PubkeyAuthentication', 'yes'),
+    ('RSAAuthentication', 'yes'),
+])
+def test_sshd_config(host, sshd_opts):
+    """
+    Let's ensure sshd does not fall back to password-based authentication
+    """
+
+    sshd_config_file = host.file("/etc/ssh/sshd_config").content_string
+
+    line = "{} {}".format(sshd_opts[0], sshd_opts[1])
+    assert line in sshd_config_file
+
+
+@pytest.mark.parametrize('logfile', [
+    '/var/log/auth.log',
+    '/var/log/syslog',
+])
+def test_no_ecrypt_messages_in_logs(host, logfile):
+    """
+    Ensure pam_ecryptfs is removed from /etc/pam.d/common-auth: not only is it
+    no longer needed, it causes error messages (see issue #3963)
+    """
+    error_message = "pam_ecryptfs.so: cannot open shared object file"
+    with host.sudo():
+        f = host.file(logfile)
+        # Not using `f.contains(<pattern>)` because that'd cause the sought
+        # string to make it into syslog as a side-effect of the testinfra
+        # invocation, causing subsequent test runs to report failure.
+        assert error_message not in f.content_string
+
+
+@pytest.mark.parametrize('package', [
+    'libiw30',
+    'wpasupplicant',
+    'wireless-tools',
+])
+def test_unused_packages_are_removed(host, package):
+    """ Check if unused package is present """
+    assert host.package(package).is_installed is False
diff --git a/testinfra/common/test_tor_mirror.py b/molecule/testinfra/staging/common/test_tor_mirror.py
similarity index 62%
rename from testinfra/common/test_tor_mirror.py
rename to molecule/testinfra/staging/common/test_tor_mirror.py
--- a/testinfra/common/test_tor_mirror.py
+++ b/molecule/testinfra/staging/common/test_tor_mirror.py
@@ -1,20 +1,17 @@
-import os
 import pytest


-@pytest.mark.skipif(
-    os.environ.get('CIRCLE_BRANCH', 'na').startswith('release'),
-    reason="Release branches will use tor-apt-test repo")
-def test_tor_mirror_present(host):
+@pytest.mark.parametrize('repo_file', [
+    "/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list",
+])
+def test_tor_mirror_absent(host, repo_file):
    """
-    Ensure the FPF mirror of the Tor apt repo, tor-apt.freedom.press,
-    is configured. This repository required manual updating with current
-    tor releases, to avoid breakage of untested updates.
+    Ensure that neither the Tor Project repo, nor the FPF mirror of the
+    Tor Project repo, tor-apt.freedom.press, are configured. We've moved
+    to hosting Tor packages inside the primary FPF apt repo.
    """
-    f = '/etc/apt/sources.list.d/tor_apt_freedom_press.list'
-
-    regex = ('^deb https:\/\/tor-apt\.freedom\.press trusty main$')
-    assert host.file(f).contains(regex)
+    f = host.file(repo_file)
+    assert not f.exists


def test_tor_keyring_absent(host):
@@ -31,7 +28,7 @@ def test_tor_keyring_absent(host):
    c = host.run("dpkg -l {}".format(package))
    assert c.rc == 1
    error_text = "dpkg-query: no packages found matching {}".format(package)
-    assert c.stderr.rstrip() == error_text
+    assert error_text in c.stderr.strip()


@pytest.mark.parametrize('tor_key_info', [
@@ -54,27 +51,20 @@ def test_tor_mirror_fingerprint(host, tor_key_info):
    assert tor_key_info not in c.stdout


-@pytest.mark.parametrize('filename', [
-    '/etc/apt/security.list',
-    '/etc/apt/sources.list.d',
+@pytest.mark.parametrize('repo_pattern', [
+    'deb.torproject.org',
+    'tor-apt.freedom.press',
+    'tor-apt-test.freedom.press',
+])
-def test_tor_project_repo_absent(host, filename):
+def test_tor_repo_absent(host, repo_pattern):
    """
    Ensure that no apt source list files contain the entry for
    the official Tor apt repo, since we don't control issuing updates
    in that repo. We're mirroring it to avoid breakage caused by
    untested updates (which has broken prod twice to date).
    """
-    c = host.run("grep -riP 'deb\.torproject\.org' {}".format(filename))
+    cmd = "grep -rF '{}' /etc/apt/".format(repo_pattern)
+    c = host.run(cmd)
    # Grep returns non-zero when no matches, and we want no matches.
    assert c.rc != 0
    assert c.stdout == ""
-
-
-def test_tor_project_repo_files_absent(host):
-    """
-    Ensure that specific apt source list files are absent,
-    having been 'hidden' via the securedrop-config package.
-    """
-    f = "/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list"
-    assert not host.file(f).exists
diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/staging/common/test_user_config.py
new file mode 100644
--- /dev/null
+++ b/molecule/testinfra/staging/common/test_user_config.py
@@ -0,0 +1,94 @@
+import re
+import textwrap
+
+
+def test_sudoers_config(host):
+    """
+    Check sudoers config for passwordless sudo via group membership,
+    as well as environment-related hardening.
+    """
+    f = host.file("/etc/sudoers")
+    assert f.is_file
+    assert f.user == "root"
+    assert f.group == "root"
+    assert f.mode == 0o440
+
+    # Restrictive file mode requires sudo for reading, so let's
+    # read once and store the content in a var.
+    with host.sudo():
+        sudoers_config = f.content_string
+
+    # Using re.search rather than `f.contains` since the basic grep
+    # matching doesn't support PCRE, so `\s` won't work.
+    assert re.search(r'^Defaults\s+env_reset$', sudoers_config, re.M)
+    assert re.search(r'^Defaults\s+mail_badpass$', sudoers_config, re.M)
+    assert re.search(r'Defaults\s+secure_path="/usr/local/sbin:'
+                     r'/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"',
+                     sudoers_config, re.M)
+    assert re.search(r'^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$',
+                     sudoers_config, re.M)
+    assert re.search(r'Defaults:%sudo\s+!requiretty', sudoers_config, re.M)
+
+
+def test_sudoers_tmux_env(host):
+    """
+    Ensure SecureDrop-specific bashrc additions are present.
+    This checks for automatic tmux start on interactive shells.
+ If we switch to byobu, we can set `byobu-enabled` and check + the corresponding settings there. + """ + + host_file = host.file('/etc/profile.d/securedrop_additions.sh') + expected_content = textwrap.dedent( + """\ + [[ $- != *i* ]] && return + + which tmux >/dev/null 2>&1 || return + + tmux_attach_via_proc() { + # If the tmux package is upgraded during the lifetime of a + # session, attaching with the new binary can fail due to different + # protocol versions. This function attaches using the reference to + # the old executable found in the /proc tree of an existing + # session. + pid=$(pgrep --newest tmux) + if test -n "$pid" + then + /proc/$pid/exe attach + fi + return 1 + } + + if test -z "$TMUX" + then + (tmux attach || tmux_attach_via_proc || tmux new-session) + fi""" + ) + assert host_file.content_string.strip() == expected_content + + +def test_tmux_installed(host): + """ + Ensure the `tmux` package is present, since it's required for the user env. + When running an interactive SSH session over Tor, tmux should be started + automatically, to prevent problems if the connection is broken + unexpectedly, as sometimes happens over Tor. The Admin will be able to + reconnect to the running tmux session and review command output. + """ + assert host.package("tmux").is_installed + + +def test_sudoers_tmux_env_deprecated(host): + """ + Previous version of the Ansible config set the tmux config + in per-user ~/.bashrc, which was redundant. The config has + since moved to /etc/profile.d, to provide a single point of + update that applies to all users. Let's make sure that the + old setting isn't still active. + """ + + admin_user = "vagrant" + + f = host.file("/home/{}/.bashrc".format(admin_user)) + assert not f.contains(r"^. \/etc\/bashrc\.securedrop_additions$") diff --git a/testinfra/mon/test_network.py b/molecule/testinfra/staging/mon/test_mon_network.py similarity index 62% rename from testinfra/mon/test_network.py rename to molecule/testinfra/staging/mon/test_mon_network.py --- a/testinfra/mon/test_network.py +++ b/molecule/testinfra/staging/mon/test_mon_network.py @@ -1,39 +1,40 @@ +import io import os import difflib import pytest from jinja2 import Template +testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars [email protected]() -def test_mon_iptables_rules(SystemInfo, Command, Sudo): +def test_mon_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison kwargs = dict( app_ip=os.environ.get('APP_IP', securedrop_test_vars.app_ip), - default_interface=Command.check_output( + default_interface=host.check_output( "ip r | head -n 1 | awk '{ print $5 }'"), - tor_user_id=Command.check_output("id -u debian-tor"), - ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"), - postfix_user_id=Command.check_output("id -u postfix"), + tor_user_id=host.check_output("id -u debian-tor"), + ssh_group_gid=host.check_output("getent group ssh | cut -d: -f3"), + postfix_user_id=host.check_output("id -u postfix"), dns_server=securedrop_test_vars.dns_server) # Build iptables scrape cmd, purge comments + counters - iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" + iptables = r"iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'" environment = os.environ.get("CI_SD_ENV", "staging") iptables_file = "{}/iptables-mon-{}.j2".format( os.path.dirname(os.path.abspath(__file__)), environment) # template out a local iptables jinja file - jinja_iptables = Template(open(iptables_file, 'r').read()) + 
jinja_iptables = Template(io.open(iptables_file, 'r').read()) iptables_expected = jinja_iptables.render(**kwargs) - with Sudo(): + with host.sudo(): # Actually run the iptables scrape command - iptables = Command.check_output(iptables) + iptables = host.check_output(iptables) # print diff comparison (only shows up in pytests if test fails or # verbosity turned way up) for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'), @@ -49,8 +50,7 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo): dict(host="0.0.0.0", proto="udp", port=1514, listening=True), dict(host="0.0.0.0", proto="tcp", port=1515, listening=False), ]) [email protected] -def test_listening_ports(Socket, Sudo, ossec_service): +def test_listening_ports(host, ossec_service): """ Ensure the OSSEC-related services are listening on the expected sockets. Services to check include ossec-remoted @@ -62,5 +62,17 @@ def test_listening_ports(Socket, Sudo, ossec_service): to config test YAML vars at that point. """ socket = "{proto}://{host}:{port}".format(**ossec_service) - with Sudo(): - assert Socket(socket).is_listening == ossec_service['listening'] + with host.sudo(): + # Really hacky work-around for bug found in testinfra 1.12.0 + # https://github.com/philpep/testinfra/issues/311 + if "udp" in socket: + lsof_socket = "{proto}@{host}:{port}".format(**ossec_service) + udp_check = host.run("lsof -n -i"+lsof_socket) + + if ossec_service['listening']: + assert udp_check.rc == 0 + else: + assert udp_check.rc == 1 + else: + assert (host.socket(socket).is_listening == + ossec_service['listening']) diff --git a/testinfra/mon/test_ossec_ruleset.py b/molecule/testinfra/staging/mon/test_ossec_ruleset.py similarity index 61% rename from testinfra/mon/test_ossec_ruleset.py rename to molecule/testinfra/staging/mon/test_ossec_ruleset.py --- a/testinfra/mon/test_ossec_ruleset.py +++ b/molecule/testinfra/staging/mon/test_ossec_ruleset.py @@ -1,7 +1,7 @@ import pytest import re - +testinfra_hosts = ["mon-staging"] alert_level_regex = re.compile(r"Level: '(\d+)'") rule_id_regex = re.compile(r"Rule id: '(\d+)'") sdvars = pytest.securedrop_test_vars @@ -9,19 +9,19 @@ @pytest.mark.parametrize('log_event', sdvars.log_events_without_ossec_alerts) -def test_ossec_false_positives_suppressed(Command, Sudo, log_event): - with Sudo(): - c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format( - log_event["alert"])) +def test_ossec_false_positives_suppressed(host, log_event): + with host.sudo(): + c = host.run('echo "{}" | /var/ossec/bin/ossec-logtest'.format( + log_event["alert"])) assert "Alert to be generated" not in c.stderr @pytest.mark.parametrize('log_event', sdvars.log_events_with_ossec_alerts) -def test_ossec_expected_alerts_are_present(Command, Sudo, log_event): - with Sudo(): - c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format( - log_event["alert"])) +def test_ossec_expected_alerts_are_present(host, log_event): + with host.sudo(): + c = host.run('echo "{}" | /var/ossec/bin/ossec-logtest'.format( + log_event["alert"])) assert "Alert to be generated" in c.stderr alert_level = alert_level_regex.findall(c.stderr)[0] assert alert_level == log_event["level"] diff --git a/molecule/testinfra/staging/mon/test_ossec_server.py b/molecule/testinfra/staging/mon/test_ossec_server.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/mon/test_ossec_server.py @@ -0,0 +1,98 @@ +import os +import pytest + + +testinfra_hosts = ["mon-staging"] +securedrop_test_vars = pytest.securedrop_test_vars + + +def 
test_ossec_connectivity(host):
+    """
+    Ensure ossec-server machine has active connection to the ossec-agent.
+    The ossec service will report all available agents, and we can inspect
+    that list to make sure it's the host we expect.
+    """
+    desired_output = "{}-{} is available.".format(
+        securedrop_test_vars.app_hostname,
+        os.environ.get('APP_IP', securedrop_test_vars.app_ip))
+    with host.sudo():
+        c = host.check_output("/var/ossec/bin/list_agents -a")
+        assert c == desired_output
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
+@pytest.mark.xfail
+@pytest.mark.parametrize('keyfile', [
+    '/var/ossec/etc/sslmanager.key',
+    '/var/ossec/etc/sslmanager.cert',
+])
+def test_ossec_keyfiles(host, keyfile):
+    """
+    Ensure that the OSSEC transport key pair exists. These keys are used
+    to protect the connection between the ossec-server and ossec-agent.
+
+    All this check does is confirm they're present, it doesn't perform any
+    matching checks to validate the configuration.
+    """
+    with host.sudo():
+        f = host.file(keyfile)
+        assert f.is_file
+        # The postinst scripts in the OSSEC deb packages set 440 on the
+        # keyfiles; the Ansible config should be updated to do the same.
+        assert f.mode == 0o440
+        assert f.user == "root"
+        assert f.group == "ossec"
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
+@pytest.mark.xfail
+def test_procmail_log(host):
+    """
+    Ensure procmail log file exists with proper ownership.
+    Only the ossec user should have read/write permissions.
+    """
+    with host.sudo():
+        f = host.file("/var/log/procmail.log")
+        assert f.is_file
+        assert f.user == "ossec"
+        assert f.group == "root"
+        assert f.mode == 0o660
+
+
+def test_ossec_authd(host):
+    """ Ensure that authd is not running """
+    with host.sudo():
+        c = host.run("pgrep ossec-authd")
+        assert c.stdout == ""
+        assert c.rc != 0
+
+
+def test_hosts_files(host):
+    """ Ensure hosts file mappings are in place """
+    f = host.file('/etc/hosts')
+
+    app_ip = os.environ.get('APP_IP', securedrop_test_vars.app_ip)
+    app_host = securedrop_test_vars.app_hostname
+
+    assert f.contains('^127.0.0.1.*localhost')
+    assert f.contains(r'^{}\s*{}$'.format(app_ip, app_host))
+
+
+def test_ossec_log_contains_no_malformed_events(host):
+    """
+    Ensure the OSSEC log reports no errors for incorrectly formatted
+    messages. These events indicate that the OSSEC server failed to decrypt
+    the event sent by the OSSEC agent, which implies a misconfiguration,
+    likely the IPv4 address or keypair differing from what's declared.
+
+    Documentation regarding this error message can be found at:
+    http://ossec-docs.readthedocs.io/en/latest/faq/unexpected.html#id4
+    """
+    with host.sudo():
+        f = host.file("/var/ossec/logs/ossec.log")
+        assert not f.contains("ERROR: Incorrectly formated message from")
+
+
+def test_regression_hosts(host):
+    """ Regression test to check for duplicate entries.
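+        Assumes duplicate /etc/hosts entries would be adjacent, since
+        "uniq --repeated" only reports repeated adjacent lines.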
""" + assert host.check_output("uniq --repeated /etc/hosts") == "" diff --git a/molecule/testinfra/staging/mon/test_postfix.py b/molecule/testinfra/staging/mon/test_postfix.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/mon/test_postfix.py @@ -0,0 +1,55 @@ +import re +import pytest + + +testinfra_hosts = ["mon-staging"] +securedrop_test_vars = pytest.securedrop_test_vars + + [email protected]('header', [ + '/^X-Originating-IP:/ IGNORE', + '/^X-Mailer:/ IGNORE', + '/^Mime-Version:/ IGNORE', + '/^User-Agent:/ IGNORE', + '/^Received:/ IGNORE', +]) +def test_postfix_headers(host, header): + """ + Ensure postfix header filters are set correctly. Common mail headers + are stripped by default to avoid leaking metadata about the instance. + Message body is always encrypted prior to sending. + """ + f = host.file("/etc/postfix/header_checks") + assert f.is_file + assert f.mode == 0o644 + regex = '^{}$'.format(re.escape(header)) + assert re.search(regex, f.content_string, re.M) + + +def test_postfix_generic_maps(host): + """ + Regression test to check that generic Postfix maps are not configured + by default. As of #1565 Admins can opt-in to overriding the FROM address + used for sending OSSEC alerts, but by default we're preserving the old + `[email protected]` behavior, to avoid breaking email for previously + existing instances. + """ + assert not host.file("/etc/postfix/generic").exists + assert not host.file("/etc/postfix/main.cf").contains("^smtp_generic_maps") + + +def test_postfix_service(host): + """ + Check Postfix service. Postfix is used to deliver OSSEC alerts via + encrypted email. On staging hosts, Postfix is disabled, due to lack + of SASL authentication credentials, but on prod hosts it should run. + """ + # Elevated privileges are required to read Postfix service info, + # specifically `/var/spool/postfix/pid/master.pid`. + with host.sudo(): + postfix = host.service("postfix") + assert postfix.is_running == securedrop_test_vars.postfix_enabled + assert postfix.is_enabled == securedrop_test_vars.postfix_enabled + + socket = host.socket("tcp://127.0.0.1:25") + assert socket.is_listening == securedrop_test_vars.postfix_enabled diff --git a/molecule/testinfra/staging/ossec/test_journalist_mail.py b/molecule/testinfra/staging/ossec/test_journalist_mail.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/ossec/test_journalist_mail.py @@ -0,0 +1,254 @@ +import pytest +import os +import testinfra +import time + + +# DRY declaration of why we're skipping all these tests. +# For details, see https://github.com/freedomofpress/securedrop/issues/3689 +SKIP_REASON = "unimplemented, see GH#3689" + + +class TestBase(object): + + @pytest.fixture(autouse=True) + def only_mon_staging_sudo(self, host): + if host.backend.host != 'mon-staging': + pytest.skip() + + with host.sudo(): + yield + + def ansible(self, host, module, parameters): + r = host.ansible(module, parameters, check=False) + assert 'exception' not in r + + def run(self, host, cmd): + print(host.backend.host + " running: " + cmd) + r = host.run(cmd) + print(r.stdout) + print(r.stderr) + return r.rc == 0 + + def wait_for(self, fun): + success = False + for d in (1, 2, 4, 8, 16, 32, 64): + if fun(): + success = True + break + time.sleep(d) + return success + + def wait_for_command(self, host, cmd): + return self.wait_for(lambda: self.run(host, cmd)) + + # + # implementation note: we do not use host.ansible("service", ... 
+ # because it only works for services in /etc/init, and not those + # legacy services found only in /etc/init.d, such as postfix + # + def service_started(self, host, name): + assert self.run(host, "service {name} start".format(name=name)) + assert self.wait_for_command( + host, + "service {name} status | grep -q 'is running'".format(name=name)) + + def service_restarted(self, host, name): + assert self.run(host, "service {name} restart".format(name=name)) + assert self.wait_for_command( + host, + "service {name} status | grep -q 'is running'".format(name=name)) + + def service_stopped(self, host, name): + assert self.run(host, "service {name} stop".format(name=name)) + assert self.wait_for_command( + host, + "service {name} status | grep -q 'not running'".format(name=name)) + + +class TestJournalistMail(TestBase): + + @pytest.mark.skip(reason=SKIP_REASON) + def test_procmail(self, host): + self.service_started(host, "postfix") + for (destination, f) in ( + ('journalist', 'alert-journalist-one.txt'), + ('journalist', 'alert-journalist-two.txt'), + ('ossec', 'alert-ossec.txt')): + # Look up CWD, in case tests move in the future + current_dir = os.path.dirname(os.path.abspath(__file__)) + self.ansible(host, "copy", + "dest=/tmp/{f} src={d}/{f}".format(f=f, + d=current_dir)) + assert self.run(host, + "/var/ossec/process_submissions_today.sh forget") + assert self.run(host, "postsuper -d ALL") + assert self.run( + host, + "cat /tmp/{f} | mail -s 'abc' root@localhost".format(f=f)) + assert self.wait_for_command( + host, + "mailq | grep -q {destination}@ossec.test".format( + destination=destination)) + self.service_stopped(host, "postfix") + + @pytest.mark.skip(reason=SKIP_REASON) + def test_process_submissions_today(self, host): + assert self.run(host, + "/var/ossec/process_submissions_today.sh " + "test_handle_notification") + assert self.run(host, + "/var/ossec/process_submissions_today.sh " + "test_modified_in_the_past_24h") + + @pytest.mark.skip(reason=SKIP_REASON) + def test_send_encrypted_alert(self, host): + self.service_started(host, "postfix") + src = ("../../install_files/ansible-base/roles/ossec/files/" + "test_admin_key.sec") + self.ansible(host, "copy", + "dest=/tmp/test_admin_key.sec src={src}".format(src=src)) + + self.run(host, "gpg --homedir /var/ossec/.gnupg" + " --import /tmp/test_admin_key.sec") + + def trigger(who, payload): + assert self.run( + host, "! 
mailq | grep -q {who}@ossec.test".format(who=who)) + assert self.run( + host, + """ + ( echo 'Subject: TEST' ; echo ; echo -e '{payload}' ) | \ + /var/ossec/send_encrypted_alarm.sh {who} + """.format(who=who, payload=payload)) + assert self.wait_for_command( + host, "mailq | grep -q {who}@ossec.test".format(who=who)) + + # + # encrypted mail to journalist or ossec contact + # + for (who, payload, expected) in ( + ('journalist', 'JOURNALISTPAYLOAD', 'JOURNALISTPAYLOAD'), + ('ossec', 'OSSECPAYLOAD', 'OSSECPAYLOAD')): + assert self.run(host, "postsuper -d ALL") + trigger(who, payload) + assert self.run( + host, + """ + job=$(mailq | sed -n -e '2p' | cut -f1 -d ' ') + postcat -q $job | tee /dev/stderr | \ + gpg --homedir /var/ossec/.gnupg --decrypt 2>&1 | \ + grep -q {expected} + """.format(expected=expected)) + # + # failure to encrypt must trigger an emergency mail to ossec contact + # + try: + assert self.run(host, "postsuper -d ALL") + assert self.run(host, "mv /usr/bin/gpg /usr/bin/gpg.save") + trigger(who, 'MYGREATPAYLOAD') + assert self.run( + host, + """ + job=$(mailq | sed -n -e '2p' | cut -f1 -d ' ') + postcat -q $job | grep -q 'Failed to encrypt OSSEC alert' + """) + finally: + assert self.run(host, "mv /usr/bin/gpg.save /usr/bin/gpg") + self.service_stopped(host, "postfix") + + @pytest.mark.skip(reason=SKIP_REASON) + def test_missing_journalist_alert(self, host): + # + # missing journalist mail does nothing + # + assert self.run( + host, + """ + JOURNALIST_EMAIL= \ + bash -x /var/ossec/send_encrypted_alarm.sh journalist | \ + tee /dev/stderr | \ + grep -q 'no notification sent' + """) + + # https://ossec-docs.readthedocs.io/en/latest/manual/rules-decoders/testing.html + @pytest.mark.skip(reason=SKIP_REASON) + def test_ossec_rule_journalist(self, host): + assert self.run(host, """ + set -ex + l="ossec: output: 'head -1 /var/lib/securedrop/submissions_today.txt" + echo "$l" | /var/ossec/bin/ossec-logtest + echo "$l" | /var/ossec/bin/ossec-logtest -U '400600:1:ossec' + """) + + @pytest.mark.skip(reason=SKIP_REASON) + def test_journalist_mail_notification(self, host): + mon = host + app = testinfra.host.Host.get_host( + 'ansible://app-staging', + ansible_inventory=host.backend.ansible_inventory) + # + # run ossec & postfix on mon + # + self.service_started(mon, "postfix") + self.service_started(mon, "ossec") + + # + # ensure the submission_today.txt file exists + # + with app.sudo(): + assert self.run(app, """ + cd /var/www/securedrop + ./manage.py were-there-submissions-today + test -f /var/lib/securedrop/submissions_today.txt + """) + + # + # empty the mailq on mon in case there were leftovers + # + assert self.run(mon, "postsuper -d ALL") + # + # forget about past notifications in case there were leftovers + # + assert self.run(mon, "/var/ossec/process_submissions_today.sh forget") + + # + # the command fires every time ossec starts, + # regardless of the frequency + # https://github.com/ossec/ossec-hids/issues/1415 + # + with app.sudo(): + self.service_restarted(app, "ossec") + + # + # wait until exactly one notification is sent + # + assert self.wait_for_command( + mon, + "mailq | grep -q journalist@ossec.test") + assert self.run( + mon, + "test 1 = $(mailq | grep journalist@ossec.test | wc -l)") + + assert self.run( + mon, + "grep --count 'notification suppressed' /var/log/syslog " + "> /tmp/before") + + # + # The second notification within less than 24h is suppressed + # + with app.sudo(): + self.service_restarted(app, "ossec") + assert self.wait_for_command(mon, """ + grep --count 
'notification suppressed' /var/log/syslog > /tmp/after + test $(cat /tmp/before) -lt $(cat /tmp/after) + """) + + # + # teardown the ossec and postfix on mon and app + # + self.service_stopped(mon, "postfix") + self.service_stopped(mon, "ossec") + with app.sudo(): + self.service_stopped(app, "ossec") diff --git a/molecule/aws/securedrop_test.pub b/molecule/upgrade/files/apt-test-fpf.pub similarity index 100% rename from molecule/aws/securedrop_test.pub rename to molecule/upgrade/files/apt-test-fpf.pub diff --git a/securedrop/bin/test b/securedrop/bin/test deleted file mode 100755 --- a/securedrop/bin/test +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -set -euo pipefail -export DISPLAY=:1 -Xvfb :1 -screen 0 1024x768x24 -ac +extension GLX +render -noreset & -redis-server & - -rm /dev/random -ln -s /dev/urandom /dev/random - -x11vnc -display :1 -autoport 5901 -shared & - -touch tests/log/firefox.log -function finish { - cp tests/log/firefox.log /tmp/test-results/logs/ - bash <(curl -s https://codecov.io/bash) -} -trap finish EXIT - -mkdir -p "/tmp/test-results/logs" - -export PAGE_LAYOUT_LOCALES="en_US,fr_FR" -pytest \ - --page-layout \ - --durations 10 \ - --junitxml=/tmp/test-results/junit.xml \ - --cov-report html:/tmp/test-results/cov_html \ - --cov-report xml:/tmp/test-results/cov.xml \ - --cov-report annotate:/tmp/test-results/cov_annotate \ - --cov=. \ - "$@" diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -1,30 +1,46 @@ # -*- coding: utf-8 -*- + +import configparser +import pretty_bad_protocol as gnupg +import logging +from hypothesis import settings import os +import io +import json +import psutil +import pytest import shutil import signal import subprocess -import logging -import gnupg -import psutil -import pytest +from flask import url_for +from pyotp import TOTP os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config +from sdconfig import SDConfig, config as original_config + +from os import path + +from db import db +from journalist_app import create_app as create_journalist_app +import models +from source_app import create_app as create_source_app +from . import utils -# TODO: the PID file for the redis worker is hard-coded below. +# The PID file for the redis worker is hard-coded below. # Ideally this constant would be provided by a test harness. # It has been intentionally omitted from `config.py.example` # in order to isolate the test vars from prod vars. TEST_WORKER_PIDFILE = '/tmp/securedrop_test_worker.pid' # Quiet down gnupg output. (See Issue #2595) -gnupg_logger = logging.getLogger(gnupg.__name__) -gnupg_logger.setLevel(logging.ERROR) -valid_levels = {'INFO': logging.INFO, 'DEBUG': logging.DEBUG} -gnupg_logger.setLevel( - valid_levels.get(os.environ.get('GNUPG_LOG_LEVEL', None), logging.ERROR) -) +GNUPG_LOG_LEVEL = os.environ.get('GNUPG_LOG_LEVEL', "ERROR") +gnupg._util.log.setLevel(getattr(logging, GNUPG_LOG_LEVEL, logging.ERROR)) + +# `hypothesis` sets a default deadline of 200 milliseconds before failing tests, +# which doesn't work for integration tests. Turn off deadlines. 
+settings.register_profile("securedrop", deadline=None) +settings.load_profile("securedrop") def pytest_addoption(parser): @@ -43,20 +59,186 @@ def pytest_collection_modifyitems(config, items): item.add_marker(skip_page_layout) +@pytest.fixture +def hardening(request): + hardening = models.LOGIN_HARDENING + + def finalizer(): + models.LOGIN_HARDENING = hardening + request.addfinalizer(finalizer) + models.LOGIN_HARDENING = True + return None + + @pytest.fixture(scope='session') -def setUptearDown(): - _start_test_rqworker(config) +def setUpTearDown(): + _start_test_rqworker(original_config) yield _stop_test_rqworker() - _cleanup_test_securedrop_dataroot(config) + _cleanup_test_securedrop_dataroot(original_config) + + +@pytest.fixture(scope='function') +def config(tmpdir): + '''Clone the module so we can modify it per test.''' + + cnf = SDConfig() + + data = tmpdir.mkdir('data') + keys = data.mkdir('keys') + os.chmod(str(keys), 0o700) + store = data.mkdir('store') + tmp = data.mkdir('tmp') + sqlite = data.join('db.sqlite') + + # gpg 2.1+ requires gpg-agent, see #4013 + gpg_agent_config = str(keys.join('gpg-agent.conf')) + with open(gpg_agent_config, 'w+') as f: + f.write('allow-loopback-pinentry') + + gpg = gnupg.GPG('gpg2', homedir=str(keys)) + for ext in ['sec', 'pub']: + with io.open(path.join(path.dirname(__file__), + 'files', + 'test_journalist_key.{}'.format(ext))) as f: + gpg.import_keys(f.read()) + + cnf.SECUREDROP_DATA_ROOT = str(data) + cnf.GPG_KEY_DIR = str(keys) + cnf.STORE_DIR = str(store) + cnf.TEMP_DIR = str(tmp) + cnf.DATABASE_FILE = str(sqlite) + + # create the db file + subprocess.check_call(['sqlite3', cnf.DATABASE_FILE, '.databases']) + + return cnf + + +@pytest.fixture(scope='function') +def alembic_config(config): + base_dir = path.join(path.dirname(__file__), '..') + migrations_dir = path.join(base_dir, 'alembic') + ini = configparser.SafeConfigParser() + ini.read(path.join(base_dir, 'alembic.ini')) + + ini.set('alembic', 'script_location', path.join(migrations_dir)) + ini.set('alembic', 'sqlalchemy.url', 'sqlite:///' + config.DATABASE_FILE) + + alembic_path = path.join(config.SECUREDROP_DATA_ROOT, 'alembic.ini') + config.TESTING_ALEMBIC_PATH = alembic_path + + with open(alembic_path, 'w') as f: + ini.write(f) + + return alembic_path + + +@pytest.fixture(scope='function') +def source_app(config): + app = create_source_app(config) + app.config['SERVER_NAME'] = 'localhost.localdomain' + with app.app_context(): + db.create_all() + yield app + + +@pytest.fixture(scope='function') +def journalist_app(config): + app = create_journalist_app(config) + app.config['SERVER_NAME'] = 'localhost.localdomain' + with app.app_context(): + db.create_all() + yield app + + +@pytest.fixture(scope='function') +def test_journo(journalist_app): + with journalist_app.app_context(): + user, password = utils.db_helper.init_journalist(is_admin=False) + username = user.username + otp_secret = user.otp_secret + return {'journalist': user, + 'username': username, + 'password': password, + 'otp_secret': otp_secret, + 'id': user.id, + 'uuid': user.uuid, + 'first_name': user.first_name, + 'last_name': user.last_name} + + +@pytest.fixture(scope='function') +def test_admin(journalist_app): + with journalist_app.app_context(): + user, password = utils.db_helper.init_journalist(is_admin=True) + username = user.username + otp_secret = user.otp_secret + return {'admin': user, + 'username': username, + 'password': password, + 'otp_secret': otp_secret, + 'id': user.id} + + +@pytest.fixture(scope='function')
+def test_source(journalist_app): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'id': source.id} + + +@pytest.fixture(scope='function') +def test_submissions(journalist_app): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions} + + +@pytest.fixture(scope='function') +def test_files(journalist_app, test_journo): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + utils.db_helper.reply(test_journo['journalist'], source, 1) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions, + 'replies': source.replies} + + +@pytest.fixture(scope='function') +def journalist_api_token(journalist_app, test_journo): + with journalist_app.test_client() as app: + valid_token = TOTP(test_journo['otp_secret']).now() + response = app.post(url_for('api.get_token'), + data=json.dumps( + {'username': test_journo['username'], + 'passphrase': test_journo['password'], + 'one_time_code': valid_token}), + headers=utils.api_helper.get_api_headers()) + return response.json['token'] def _start_test_rqworker(config): if not psutil.pid_exists(_get_pid_from_file(TEST_WORKER_PIDFILE)): - tmp_logfile = open('/tmp/test_rqworker.log', 'w') - subprocess.Popen(['rqworker', 'test', + tmp_logfile = io.open('/tmp/test_rqworker.log', 'w') + subprocess.Popen(['rqworker', config.RQ_WORKER_NAME, '-P', config.SECUREDROP_ROOT, - '--pid', TEST_WORKER_PIDFILE], + '--pid', TEST_WORKER_PIDFILE, + '--logging_level', 'DEBUG', + '-v'], stdout=tmp_logfile, stderr=subprocess.STDOUT) @@ -73,9 +255,9 @@ def _stop_test_rqworker(): def _get_pid_from_file(pid_file_name): try: - return int(open(pid_file_name).read()) + return int(io.open(pid_file_name).read()) except IOError: - return None + return -1 def _cleanup_test_securedrop_dataroot(config): diff --git a/securedrop/tests/functional/README.md b/securedrop/tests/functional/README.md new file mode 100644 --- /dev/null +++ b/securedrop/tests/functional/README.md @@ -0,0 +1,33 @@ +### To test in prod vms + +- `sudo -u www-data bash` +- `cd /var/www/securedrop/` +- `./manage.py reset` # This will clean the DB for testing +- `./create-dev-data.py --staging` + +Add this information to the `tests/functional/instance_information.json` file. + +The content of the file looks like the example below. + +``` +{ + "hidserv_token": "asfjsdfag", + "journalist_location": "http://thejournalistfqb.onion", + "source_location": "http://thesourceadsfa.onion", + "timeout": 10, + "user": { + "name": "journalist", + "password": "WEjwn8ZyczDhQSK24YKM8C9a", + "secret": "JHCOGO7VCER3EJ4L" + } +} +``` + +### Run the tests + +``` +cd securedrop +./bin/dev-shell ./bin/run-test -v tests/functional/ +``` +You may wish to append a pipe to less (i.e. `| less`), as a failure may generate +many pages of output, making it difficult to scroll back. 
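Aside: the README above documents the shape of `instance_information.json` but the diff does not show how tests consume it. A minimal sketch of a loader, assuming `pyotp` is available; the `load_instance_information` helper and its default path are illustrative, not part of this changeset.

```python
# Illustrative only (not part of this diff): load the functional-test
# configuration documented in the README above.
import json

import pyotp


def load_instance_information(path="tests/functional/instance_information.json"):
    with open(path) as f:
        info = json.load(f)
    # The stored shared secret can generate valid one-time login codes,
    # e.g. info["user"]["totp"].now()
    info["user"]["totp"] = pyotp.TOTP(info["user"]["secret"])
    return info
```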
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py --- a/securedrop/tests/functional/functional_test.py +++ b/securedrop/tests/functional/functional_test.py @@ -1,222 +1,444 @@ # -*- coding: utf-8 -*- -from datetime import datetime -import errno -import mock -from multiprocessing import Process +from __future__ import print_function + +import logging import os -from os.path import abspath, dirname, join, realpath import signal import socket import time import traceback -import requests +from datetime import datetime +from multiprocessing import Process +from os.path import abspath +from os.path import dirname +from os.path import expanduser +from os.path import join +from os.path import realpath -from Cryptodome import Random +import mock +import pyotp +import pytest +import requests +import tbselenium.common as cm from selenium import webdriver -from selenium.common.exceptions import (WebDriverException, - NoAlertPresentException) -from selenium.webdriver.firefox import firefox_binary -from selenium.webdriver.support.ui import WebDriverWait +from selenium.common.exceptions import NoAlertPresentException +from selenium.common.exceptions import WebDriverException +from selenium.webdriver.common.by import By +from selenium.webdriver.remote.remote_connection import LOGGER from selenium.webdriver.support import expected_conditions - -os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config -import db -import journalist -from source_app import create_app -import crypto_util +from selenium.webdriver.support.ui import WebDriverWait +from selenium.webdriver.common.action_chains import ActionChains +from selenium.webdriver.common.keys import Keys +from sqlalchemy.exc import IntegrityError +from tbselenium.tbdriver import TorBrowserDriver +from tbselenium.utils import disable_js + +import journalist_app +import source_app import tests.utils.env as env +from db import db +from models import Journalist +from sdconfig import config -LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log')) +os.environ["SECUREDROP_ENV"] = "test" +LOGFILE_PATH = abspath(join(dirname(realpath(__file__)), "../log/driver.log")) +FIREFOX_PATH = "/usr/bin/firefox/firefox" -# https://stackoverflow.com/a/34795883/837471 -class alert_is_not_present(object): - """ Expect an alert to not be present.""" - def __call__(self, driver): - try: - alert = driver.switch_to.alert - alert.text - return False - except NoAlertPresentException: - return True +TBB_PATH = abspath(join(expanduser("~"), ".local/tbb/tor-browser_en-US/")) +os.environ["TBB_PATH"] = TBB_PATH +TBBRC = join(TBB_PATH, "Browser/TorBrowser/Data/Tor/torrc") +LOGGER.setLevel(logging.WARNING) + +FIREFOX = "firefox" +TORBROWSER = "torbrowser" + +TBB_SECURITY_HIGH = 1 +TBB_SECURITY_MEDIUM = 3 # '2' corresponds to deprecated TBB medium-high setting +TBB_SECURITY_LOW = 4 class FunctionalTest(object): + gpg = None + new_totp = None + session_expiration = 30 + secret_message = "These documents outline a major government invasion of privacy." 
+ timeout = 10 + poll_frequency = 0.1 + + accept_languages = None + default_driver_name = TORBROWSER + driver = None + firefox_driver = None + torbrowser_driver = None + + driver_retry_count = 3 + driver_retry_interval = 5 def _unused_port(self): s = socket.socket() - s.bind(("localhost", 0)) + s.bind(("127.0.0.1", 0)) port = s.getsockname()[1] s.close() return port - def _create_webdriver(self, firefox, profile=None): - # see https://review.openstack.org/#/c/375258/ and the - # associated issues for background on why this is necessary - connrefused_retry_count = 3 - connrefused_retry_interval = 5 - - for i in range(connrefused_retry_count + 1): - try: - driver = webdriver.Firefox(firefox_binary=firefox, - firefox_profile=profile) - if i > 0: - # i==0 is normal behavior without connection refused. - print('NOTE: Retried {} time(s) due to ' - 'connection refused.'.format(i)) - return driver - except socket.error as socket_error: - if (socket_error.errno == errno.ECONNREFUSED - and i < connrefused_retry_count): - time.sleep(connrefused_retry_interval) - continue - raise - - def _prepare_webdriver(self): - log_file = open(join(LOG_DIR, 'firefox.log'), 'a') - log_file.write( - '\n\n[%s] Running Functional Tests\n' % str( - datetime.now())) + def set_tbb_securitylevel(self, level): + + if level not in {TBB_SECURITY_HIGH, TBB_SECURITY_MEDIUM, TBB_SECURITY_LOW}: + raise ValueError("Invalid Tor Browser security setting: " + str(level)) + + if self.torbrowser_driver is None: + self.create_torbrowser_driver() + driver = self.torbrowser_driver + + driver.get("about:config") + accept_risk_button = driver.find_element_by_id("warningButton") + if accept_risk_button: + accept_risk_button.click() + ActionChains(driver).send_keys(Keys.RETURN).\ + send_keys("extensions.torbutton.security_slider").perform() + time.sleep(1) + ActionChains(driver).send_keys(Keys.TAB).\ + send_keys(Keys.RETURN).perform() + alert = self.wait_for(lambda: driver.switch_to.alert) + alert.send_keys(str(level)) + time.sleep(1) + self.wait_for(lambda: alert.accept()) + + def create_torbrowser_driver(self): + logging.info("Creating TorBrowserDriver") + log_file = open(LOGFILE_PATH, "a") + log_file.write("\n\n[%s] Running Functional Tests\n" % str(datetime.now())) log_file.flush() - return firefox_binary.FirefoxBinary(log_file=log_file) - - def setup(self, session_expiration=30): - # Patch the two-factor verification to avoid intermittent errors - self.patcher = mock.patch('db.Journalist.verify_token') - self.mock_journalist_verify_token = self.patcher.start() - self.mock_journalist_verify_token.return_value = True - self.patcher2 = mock.patch('source_app.main.get_entropy_estimate') - self.mock_get_entropy_estimate = self.patcher2.start() - self.mock_get_entropy_estimate.return_value = 8192 - - signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s)) - - env.create_directories() - self.gpg = env.init_gpg() - db.init_db() - - source_port = self._unused_port() - journalist_port = self._unused_port() + # Don't use Tor when reading from localhost, and turn off private + # browsing. We need to turn off private browsing because we won't be + # able to access the browser's cookies in private browsing mode. Since + # we use session cookies in SD anyway (in private browsing mode all + # cookies are set as session cookies), this should not affect session + # lifetime. 
+ pref_dict = { + "network.proxy.no_proxies_on": "127.0.0.1", + "browser.privatebrowsing.autostart": False, + } + if self.accept_languages is not None: + pref_dict["intl.accept_languages"] = self.accept_languages + + for i in range(self.driver_retry_count): + try: + self.torbrowser_driver = TorBrowserDriver( + TBB_PATH, + tor_cfg=cm.USE_RUNNING_TOR, + pref_dict=pref_dict, + tbb_logfile_path=LOGFILE_PATH, + ) + logging.info("Created Tor Browser web driver") + self.torbrowser_driver.set_window_position(0, 0) + self.torbrowser_driver.set_window_size(1024, 1200) + break + except Exception as e: + logging.error("Error creating Tor Browser web driver: %s", e) + if i < self.driver_retry_count: + time.sleep(self.driver_retry_interval) - self.source_location = "http://localhost:%d" % source_port - self.journalist_location = "http://localhost:%d" % journalist_port + if not self.torbrowser_driver: + raise Exception("Could not create Tor Browser web driver") - # Allow custom session expiration lengths - self.session_expiration = session_expiration + def create_firefox_driver(self): + logging.info("Creating Firefox web driver") - def start_source_server(): - # We call Random.atfork() here because we fork the source and - # journalist server from the main Python process we use to drive - # our browser with multiprocessing.Process() below. These child - # processes inherit the same RNG state as the parent process, which - # is a problem because they would produce identical output if we - # didn't re-seed them after forking. - Random.atfork() + profile = webdriver.FirefoxProfile() + if self.accept_languages is not None: + profile.set_preference("intl.accept_languages", self.accept_languages) + profile.update_preferences() - config.SESSION_EXPIRATION_MINUTES = self.session_expiration + for i in range(self.driver_retry_count): + try: + self.firefox_driver = webdriver.Firefox( + firefox_binary=FIREFOX_PATH, firefox_profile=profile + ) + self.firefox_driver.set_window_position(0, 0) + self.firefox_driver.set_window_size(1024, 1200) + logging.info("Created Firefox web driver") + break + except Exception as e: + logging.error("Error creating Firefox web driver: %s", e) + if i < self.driver_retry_count: + time.sleep(self.driver_retry_interval) + if not self.firefox_driver: + raise Exception("Could not create Firefox web driver") + + def switch_to_firefox_driver(self): + if not self.firefox_driver: + self.create_firefox_driver() + self.driver = self.firefox_driver + logging.info("Switched %s to Firefox driver: %s", self, self.driver) + + def switch_to_torbrowser_driver(self): + if self.torbrowser_driver is None: + self.create_torbrowser_driver() + self.driver = self.torbrowser_driver + logging.info("Switched %s to TorBrowser driver: %s", self, self.driver) + + def disable_js_torbrowser_driver(self): + if hasattr(self, 'torbrowser_driver'): + disable_js(self.torbrowser_driver) + + @pytest.fixture(autouse=True) + def set_default_driver(self): + logging.info("Creating default web driver: %s", self.default_driver_name) + if self.default_driver_name == FIREFOX: + self.switch_to_firefox_driver() + else: + self.switch_to_torbrowser_driver() + + yield - source_app = create_app(config) + try: + if self.torbrowser_driver: + self.torbrowser_driver.quit() + except Exception as e: + logging.error("Error stopping TorBrowser driver: %s", e) - source_app.run( - port=source_port, - debug=True, - use_reloader=False, - threaded=True) + try: + if self.firefox_driver: + self.firefox_driver.quit() + except Exception as e: + 
logging.error("Error stopping Firefox driver: %s", e) - def start_journalist_server(): - Random.atfork() - journalist.app.run( - port=journalist_port, - debug=True, - use_reloader=False, - threaded=True) + @pytest.fixture(autouse=True) + def sd_servers(self): + logging.info( + "Starting SecureDrop servers (session expiration = %s)", self.session_expiration + ) - self.source_process = Process(target=start_source_server) - self.journalist_process = Process(target=start_journalist_server) + # Patch the two-factor verification to avoid intermittent errors + logging.info("Mocking models.Journalist.verify_token") + with mock.patch("models.Journalist.verify_token", return_value=True): + logging.info("Mocking source_app.main.get_entropy_estimate") + with mock.patch("source_app.main.get_entropy_estimate", return_value=8192): + + try: + signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s)) + + source_port = self._unused_port() + journalist_port = self._unused_port() + + self.source_location = "http://127.0.0.1:%d" % source_port + self.journalist_location = "http://127.0.0.1:%d" % journalist_port + + self.source_app = source_app.create_app(config) + self.journalist_app = journalist_app.create_app(config) + self.journalist_app.config["WTF_CSRF_ENABLED"] = True + + self.__context = self.journalist_app.app_context() + self.__context.push() + + env.create_directories() + db.create_all() + self.gpg = env.init_gpg() + + # Add our test user + try: + valid_password = "correct horse battery staple profanity oil chewy" + user = Journalist( + username="journalist", password=valid_password, is_admin=True + ) + user.otp_secret = "JHCOGO7VCER3EJ4L" + db.session.add(user) + db.session.commit() + except IntegrityError: + logging.error("Test user already added") + db.session.rollback() + + # This user is required for our tests cases to login + self.admin_user = { + "name": "journalist", + "password": ("correct horse battery staple" " profanity oil chewy"), + "secret": "JHCOGO7VCER3EJ4L", + } + + self.admin_user["totp"] = pyotp.TOTP(self.admin_user["secret"]) + + def start_source_server(app): + config.SESSION_EXPIRATION_MINUTES = self.session_expiration / 60.0 + + app.run(port=source_port, debug=True, use_reloader=False, threaded=True) + + def start_journalist_server(app): + app.run(port=journalist_port, debug=True, use_reloader=False, threaded=True) + + self.source_process = Process( + target=lambda: start_source_server(self.source_app) + ) + + self.journalist_process = Process( + target=lambda: start_journalist_server(self.journalist_app) + ) + + self.source_process.start() + self.journalist_process.start() + + for tick in range(30): + try: + requests.get(self.source_location, timeout=1) + requests.get(self.journalist_location, timeout=1) + except Exception: + time.sleep(0.25) + else: + break + yield + finally: + try: + self.source_process.terminate() + except Exception as e: + logging.error("Error stopping source app: %s", e) + + try: + self.journalist_process.terminate() + except Exception as e: + logging.error("Error stopping source app: %s", e) + + env.teardown() + self.__context.pop() - self.source_process.start() - self.journalist_process.start() + def wait_for_source_key(self, source_name): + filesystem_id = self.source_app.crypto_util.hash_codename(source_name) - for tick in range(30): - try: - requests.get(self.source_location) - requests.get(self.journalist_location) - except: - time.sleep(1) - else: - break + def key_available(filesystem_id): + assert 
self.source_app.crypto_util.getkey(filesystem_id) - if not hasattr(self, 'override_driver'): - self.driver = self._create_webdriver(self._prepare_webdriver()) - - # Polls the DOM to wait for elements. To read more about why - # this is necessary: - # - # http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html - # - # A value of 5 is known to not be enough in some cases, when - # the machine hosting the tests is slow, reason why it was - # raised to 10. Setting the value to 60 or more would surely - # cover even the slowest of machine. However it also means - # that a test failing to find the desired element in the DOM - # will only report failure after 60 seconds which is painful - # for quickly debuging. - # - self.driver.implicitly_wait(10) - - # Set window size and position explicitly to avoid potential bugs due - # to discrepancies between environments. - self.driver.set_window_position(0, 0) - self.driver.set_window_size(1024, 768) - - self.secret_message = ('These documents outline a major government ' - 'invasion of privacy.') + self.wait_for(lambda: key_available(filesystem_id), timeout=60) - def wait_for_source_key(self, source_name): - filesystem_id = crypto_util.hash_codename(source_name) + def create_new_totp(self, secret): + self.new_totp = pyotp.TOTP(secret) - def key_available(filesystem_id): - assert crypto_util.getkey(filesystem_id) - self.wait_for( - lambda: key_available(filesystem_id), timeout=60) - - def teardown(self): - self.patcher.stop() - env.teardown() - if not hasattr(self, 'override_driver'): - self.driver.quit() - self.source_process.terminate() - self.journalist_process.terminate() - - def wait_for(self, function_with_assertion, timeout=5): + def wait_for(self, function_with_assertion, timeout=None): """Polling wait for an arbitrary assertion.""" # Thanks to # http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions + if timeout is None: + timeout = self.timeout + start_time = time.time() while time.time() - start_time < timeout: try: return function_with_assertion() except (AssertionError, WebDriverException): - time.sleep(0.1) + time.sleep(self.poll_frequency) # one more try, which will raise any errors if they are outstanding return function_with_assertion() - def _alert_wait(self): - WebDriverWait(self.driver, 10).until( - expected_conditions.alert_is_present(), - 'Timed out waiting for confirmation popup.') + def safe_click_by_id(self, element_id): + """ + Clicks the element with the given ID attribute. + + Returns: + el: The element, if found. + + Raises: + selenium.common.exceptions.TimeoutException: If the element cannot be found in time. + + """ + el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.ID, element_id)) + ) + el.location_once_scrolled_into_view + el.click() + return el + + def safe_click_by_css_selector(self, selector): + """ + Clicks the first element with the given CSS selector. + + Returns: + el: The element, if found. + + Raises: + selenium.common.exceptions.TimeoutException: If the element cannot be found in time. + + """ + el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.CSS_SELECTOR, selector)) + ) + el.click() + return el + + def safe_click_all_by_css_selector(self, selector, root=None): + """ + Clicks each element that matches the given CSS selector. 
+ + Returns: + els (list): The list of elements that matched the selector. + + Raises: + selenium.common.exceptions.TimeoutException: If the element cannot be found in time. + + """ + if root is None: + root = self.driver + els = self.wait_for(lambda: root.find_elements_by_css_selector(selector)) + for el in els: + clickable_el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.CSS_SELECTOR, selector)) + ) + clickable_el.click() + return els + + def safe_send_keys_by_id(self, element_id, text): + """ + Sends the given text to the element with the specified ID. + + Returns: + el: The element, if found. + + Raises: + selenium.common.exceptions.TimeoutException: If the element cannot be found in time. + + """ + el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.ID, element_id)) + ) + el.send_keys(text) + return el + + def safe_send_keys_by_css_selector(self, selector, text): + """ + Sends the given text to the first element with the given CSS selector. + + Returns: + el: The element, if found. + + Raises: + selenium.common.exceptions.TimeoutException: If the element cannot be found in time. + + """ + el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.CSS_SELECTOR, selector)) + ) + el.send_keys(text) + return el + + def alert_wait(self, timeout=None): + if timeout is None: + timeout = self.timeout * 10 + WebDriverWait(self.driver, timeout, self.poll_frequency).until( + expected_conditions.alert_is_present(), "Timed out waiting for confirmation popup." + ) + + def alert_accept(self): + # adapted from https://stackoverflow.com/a/34795883/837471 + def alert_is_not_present(object): + """ Expect an alert to not be present.""" + try: + alert = self.driver.switch_to.alert + alert.text + return False + except NoAlertPresentException: + return True - def _alert_accept(self): self.driver.switch_to.alert.accept() - WebDriverWait(self.driver, 10).until( - alert_is_not_present(), - 'Timed out waiting for confirmation popup to disappear.') - - def _alert_dismiss(self): - self.driver.switch_to.alert.dismiss() - WebDriverWait(self.driver, 10).until( - alert_is_not_present(), - 'Timed out waiting for confirmation popup to disappear.') + WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + alert_is_not_present, "Timed out waiting for confirmation popup to disappear." + ) diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -1,22 +1,46 @@ -import pytest -import urllib2 -import re -import tempfile +from __future__ import print_function + import gzip +import logging import os +import random +import re +import tempfile +import time +from os.path import dirname +import pytest +import requests from selenium.common.exceptions import NoSuchElementException +from selenium.common.exceptions import TimeoutException +from selenium.webdriver.common.action_chains import ActionChains +from selenium.webdriver.common.by import By +from selenium.webdriver.common.keys import Keys +from selenium.webdriver.support import expected_conditions +from selenium.webdriver.support.ui import WebDriverWait + + +# Number of times to try flaky clicks. 
+CLICK_ATTEMPTS = 15 + -import tests.utils.db_helper as db_helper -import crypto_util -from db import Journalist -from step_helpers import screenshots -import config +# A generator to get unlimited user names for our tests. +# The pages-layout tests require many users during +# the test run, which is why we have the following +# implementation. +def get_journalist_usernames(): + yield "dellsberg" + yield "jpb" + yield "bassel" + while True: + num = random.randint(1000, 1000000) + yield "journalist" + str(num) -class JournalistNavigationStepsMixin(): +journalist_usernames = get_journalist_usernames() - @screenshots + +class JournalistNavigationStepsMixin: def _get_submission_content(self, file_url, raw_content): if not file_url.endswith(".gz.gpg"): return str(raw_content) @@ -25,626 +49,961 @@ def _get_submission_content(self, file_url, raw_content): fp.write(raw_content.data) fp.seek(0) - gzf = gzip.GzipFile(mode='rb', fileobj=fp) + gzf = gzip.GzipFile(mode="rb", fileobj=fp) content = gzf.read() return content + def return_downloaded_content(self, url, cookies): + """ + This downloads and returns the content to the caller + :param url: URL to download + :param cookies: the cookies to access + :return: Content of the URL + """ + proxies = None + if ".onion" in url: + proxies = {"http": "socks5h://127.0.0.1:9150", "https": "socks5h://127.0.0.1:9150"} + r = requests.get(url, cookies=cookies, proxies=proxies, stream=True) + if r.status_code != 200: + raise Exception("Failed to download the data.") + data = b"" + for chunk in r.iter_content(1024): + data += chunk + return data + def _input_text_in_login_form(self, username, password, token): self.driver.get(self.journalist_location + "/login") - username_field = self.driver.find_element_by_css_selector( - 'input[name="username"]') - username_field.send_keys(username) - - password_field = self.driver.find_element_by_css_selector( - 'input[name="password"]') - password_field.send_keys(password) - - token_field = self.driver.find_element_by_css_selector( - 'input[name="token"]') - token_field.send_keys(token) + self.safe_send_keys_by_css_selector('input[name="username"]', username) + self.safe_send_keys_by_css_selector('input[name="password"]', password) + self.safe_send_keys_by_css_selector('input[name="token"]', token) def _try_login_user(self, username, password, token): self._input_text_in_login_form(username, password, token) + self.safe_click_by_css_selector('button[type="submit"]') - submit_button = self.driver.find_element_by_css_selector( - 'button[type=submit]') - submit_button.click() + def _login_user(self, username, password, otp, maxtries=3): + token = str(otp.now()) + for i in range(maxtries): - @screenshots - def _login_user(self, username, password, token): - self._try_login_user(username, password, token) - # Successful login should redirect to the index - assert self.driver.current_url == self.journalist_location + '/' + self._try_login_user(username, password, token) + # Successful login should redirect to the index + self.wait_for( + lambda: self.driver.find_element_by_id("logout"), timeout=self.timeout * 2 + ) + if self.driver.current_url != self.journalist_location + "/": + new_token = str(otp.now()) + while token == new_token: + time.sleep(1) + new_token = str(otp.now()) + token = new_token + else: + return + + # If we reach here, assert the error + assert self.driver.current_url == self.journalist_location + "/", ( + self.driver.current_url + " " + self.journalist_location + ) + + def _is_on_journalist_homepage(self): + return 
self.wait_for( + lambda: self.driver.find_element_by_css_selector("div.journalist-view-all") + ) - @screenshots def _journalist_logs_in(self): # Create a test user for logging in - self.user, self.user_pw = db_helper.init_journalist() - self._login_user(self.user.username, self.user_pw, 'mocked') - - headline = self.driver.find_element_by_css_selector('span.headline') - if not hasattr(self, 'accept_languages'): - assert 'Sources' in headline.text + self.user = self.admin_user["name"] + self.user_pw = self.admin_user["password"] + self._login_user(self.user, self.user_pw, self.admin_user["totp"]) + assert self._is_on_journalist_homepage() def _journalist_visits_col(self): - self.driver.find_element_by_css_selector( - '#un-starred-source-link-1').click() + self.wait_for(lambda: self.driver.find_element_by_id("cols")) + + self.safe_click_by_id("un-starred-source-link-1") + + self.wait_for(lambda: self.driver.find_element_by_css_selector("ul#submissions")) def _journalist_selects_first_doc(self): - self.driver.find_elements_by_name('doc_names_selected')[0].click() + self.safe_click_by_css_selector('input[type="checkbox"][name="doc_names_selected"]') + + self.wait_for( + lambda: expected_conditions.element_located_to_be_selected( + (By.CSS_SELECTOR, 'input[type="checkbox"][name="doc_names_selected"]') + ) + ) - def _journalist_clicks_delete_selected_javascript(self): - self.driver.find_element_by_id('delete-selected').click() - self._alert_wait() + assert self.driver.find_element_by_css_selector( + 'input[type="checkbox"][name="doc_names_selected"]' + ).is_selected() - def _journalist_verifies_deletion_of_one_submission_javascript(self): - self._journalist_selects_first_doc() - self._journalist_clicks_delete_selected_javascript() - self._alert_dismiss() - selected_count = len(self.driver.find_elements_by_name( - 'doc_names_selected')) + def _journalist_clicks_on_modal(self, click_id): + self.safe_click_by_id(click_id) + + def _journalist_clicks_delete_collections_cancel_on_modal(self): + self._journalist_clicks_on_modal("cancel-collections-deletions") + + def _journalist_clicks_delete_selected_cancel_on_modal(self): + self._journalist_clicks_on_modal("cancel-selected-deletions") + + def _journalist_clicks_delete_collection_cancel_on_modal(self): + self._journalist_clicks_on_modal("cancel-collection-deletions") + + def _journalist_clicks_delete_collections_on_modal(self): + self._journalist_clicks_on_modal("delete-collections") + + def collection_deleted(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "1 collection deleted" in flash_msg.text + + self.wait_for(collection_deleted) + + def _journalist_clicks_delete_selected_on_modal(self): + self._journalist_clicks_on_modal("delete-selected") + + def submission_deleted(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Submission deleted." 
in flash_msg.text + + self.wait_for(submission_deleted) + + def _journalist_clicks_delete_collection_on_modal(self): + self._journalist_clicks_on_modal("delete-collection-button") + + def _journalist_clicks_delete_link(self, click_id, displayed_id): + self.safe_click_by_id(click_id) + self.wait_for(lambda: self.driver.find_element_by_id(displayed_id)) + + def _journalist_clicks_delete_selected_link(self): + self.safe_click_by_css_selector("a#delete-selected-link > button.danger") + self.wait_for(lambda: self.driver.find_element_by_id("delete-selected-confirmation-modal")) + + def _journalist_clicks_delete_collections_link(self): + self._journalist_clicks_delete_link("delete-collections-link", "delete-confirmation-modal") + + def _journalist_clicks_delete_collection_link(self): + self._journalist_clicks_delete_link( + "delete-collection-link", "delete-collection-confirmation-modal" + ) + + def _journalist_uses_delete_selected_button_confirmation(self): + selected_count = len(self.driver.find_elements_by_name("doc_names_selected")) assert selected_count > 0 - self._journalist_clicks_delete_selected_javascript() - self._alert_accept() - assert selected_count > len(self.driver.find_elements_by_name( - 'doc_names_selected')) - @screenshots + self._journalist_selects_first_doc() + self._journalist_clicks_delete_selected_link() + self._journalist_clicks_delete_selected_cancel_on_modal() + assert selected_count == len(self.driver.find_elements_by_name("doc_names_selected")) + + self._journalist_clicks_delete_selected_link() + self._journalist_clicks_delete_selected_on_modal() + + def docs_deleted(): + assert selected_count > len(self.driver.find_elements_by_name("doc_names_selected")) + + self.wait_for(docs_deleted) + + def _journalist_uses_delete_collection_button_confirmation(self): + self._journalist_clicks_delete_collection_link() + self._journalist_clicks_delete_collection_cancel_on_modal() + self._journalist_clicks_delete_collection_link() + self._journalist_clicks_delete_collection_on_modal() + + # Now we should be redirected to the index. + assert self._is_on_journalist_homepage() + + def _journalist_uses_delete_collections_button_confirmation(self): + sources = self.driver.find_elements_by_class_name("code-name") + assert len(sources) > 0 + + try: + # If JavaScript is enabled, use the select_all button. + self.driver.find_element_by_id("select_all") + self.safe_click_by_id("select_all") + except NoSuchElementException: + self.safe_click_all_by_css_selector('input[type="checkbox"][name="cols_selected"]') + + self._journalist_clicks_delete_collections_link() + self._journalist_clicks_delete_collections_cancel_on_modal() + + sources = self.driver.find_elements_by_class_name("code-name") + assert len(sources) > 0 + + self._journalist_clicks_delete_collections_link() + self._journalist_clicks_delete_collections_on_modal() + + # We should be redirected to the index without those boxes selected. + def no_sources(): + assert len(self.driver.find_elements_by_class_name("code-name")) == 0 + + self.wait_for(no_sources) + def _admin_logs_in(self): - self.admin, self.admin_pw = db_helper.init_journalist(is_admin=True) - self._login_user(self.admin.username, self.admin_pw, 'mocked') - - if not hasattr(self, 'accept_languages'): - # Admin user should log in to the same interface as a - # normal user, since there may be users who wish to be - # both journalists and admins. 
- headline = self.driver.find_element_by_css_selector( - 'span.headline') - assert 'Sources' in headline.text - - # Admin user should have a link that take them to the admin page - links = self.driver.find_elements_by_tag_name('a') - assert 'Admin' in [el.text for el in links] - - @screenshots + self.admin = self.admin_user["name"] + self.admin_pw = self.admin_user["password"] + self._login_user(self.admin, self.admin_pw, self.admin_user["totp"]) + + # Admin user should log in to the same interface as a + # normal user, since there may be users who wish to be + # both journalists and admins. + assert self._is_on_journalist_homepage() + + # Admin user should have a link that takes them to the admin page + assert self.driver.find_element_by_id("link-admin-index") + def _admin_visits_admin_interface(self): - admin_interface_link = self.driver.find_element_by_id( - 'link-admin-index') - admin_interface_link.click() - if not hasattr(self, 'accept_languages'): - h1s = self.driver.find_elements_by_tag_name('h1') - assert "Admin Interface" in [el.text for el in h1s] + self.safe_click_by_id("link-admin-index") + + self.wait_for(lambda: self.driver.find_element_by_id("add-user")) def _admin_visits_system_config_page(self): - system_config_link = self.driver.find_element_by_id( - 'update-instance-config' - ) - system_config_link.click() - if not hasattr(self, 'accept_languages'): - h1 = self.driver.find_element_by_tag_name('h1') - assert "Instance Configuration" in h1.text + self.safe_click_by_id("update-instance-config") + + def config_page_loaded(): + assert self.driver.find_element_by_id("test-ossec-alert") + + self.wait_for(config_page_loaded) def _admin_updates_logo_image(self): - logo_upload_input = self.driver.find_element_by_id('logo-upload') - logo_upload_input.send_keys( - os.path.join(config.SECUREDROP_ROOT, "static/i/logo.png") - ) + dir_name = dirname(dirname(dirname(os.path.abspath(__file__)))) + image_path = os.path.abspath(os.path.join(dir_name, "static/i/logo.png")) + + self.safe_send_keys_by_id("logo-upload", image_path) + + self.safe_click_by_id("submit-logo-update") + + def updated_image(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Image updated." in flash_msg.text - submit_button = self.driver.find_element_by_id('submit-logo-update') - submit_button.click() + # giving extra time for upload to complete + self.wait_for(updated_image, timeout=self.timeout * 6) - if not hasattr(self, 'accept_languages'): - flashed_msgs = self.driver.find_element_by_css_selector('.flash') - assert 'Image updated.' 
in flashed_msgs.text + def _add_user(self, username, first_name="", last_name="", is_admin=False, hotp=None): + self.safe_send_keys_by_css_selector('input[name="username"]', username) - @screenshots - def _add_user(self, username, is_admin=False, hotp=None): - username_field = self.driver.find_element_by_css_selector( - 'input[name="username"]') - username_field.send_keys(username) + if first_name: + self.safe_send_keys_by_id("first_name", first_name) + + if last_name: + self.safe_send_keys_by_id("last_name", last_name) if hotp: - hotp_checkbox = self.driver.find_element_by_css_selector( - 'input[name="is_hotp"]') - print(str(hotp_checkbox.__dict__)) - hotp_checkbox.click() - hotp_secret = self.driver.find_element_by_css_selector( - 'input[name="otp_secret"]') - hotp_secret.send_keys(hotp) + self.safe_click_all_by_css_selector('input[name="is_hotp"]') + self.safe_send_keys_by_css_selector('input[name="otp_secret"]', hotp) if is_admin: - # TODO implement (checkbox is unchecked by default) - pass + self.safe_click_by_css_selector('input[name="is_admin"]') - submit_button = self.driver.find_element_by_css_selector( - 'button[type=submit]') - submit_button.click() + self.safe_click_by_css_selector("button[type=submit]") - @screenshots - def _admin_adds_a_user(self): - add_user_btn = self.driver.find_element_by_css_selector( - 'button#add-user') - add_user_btn.click() + self.wait_for(lambda: self.driver.find_element_by_id("check-token")) + + def _admin_adds_a_user(self, is_admin=False, new_username=""): + self.safe_click_by_id("add-user") - if not hasattr(self, 'accept_languages'): + self.wait_for(lambda: self.driver.find_element_by_id("username")) + + if not hasattr(self, "accept_languages"): # The add user page has a form with an "ADD USER" button - btns = self.driver.find_elements_by_tag_name('button') - assert 'ADD USER' in [el.text for el in btns] + btns = self.driver.find_elements_by_tag_name("button") + assert "ADD USER" in [el.text for el in btns] - password = self.driver.find_element_by_css_selector('#password') \ - .text.strip() + password = self.driver.find_element_by_css_selector("#password").text.strip() - self.new_user = dict( - username='dellsberg', - password=password, - ) - self._add_user(self.new_user['username']) + if not new_username: + new_username = next(journalist_usernames) + self.new_user = dict(username=new_username, first_name='', last_name='', password=password) + self._add_user(self.new_user["username"], + first_name=self.new_user['first_name'], + last_name=self.new_user['last_name'], + is_admin=is_admin) - if not hasattr(self, 'accept_languages'): + if not hasattr(self, "accept_languages"): # Clicking submit on the add user form should redirect to # the FreeOTP page - h1s = self.driver.find_elements_by_tag_name('h1') - assert "Enable FreeOTP" in [el.text for el in h1s] + h1s = [h1.text for h1 in self.driver.find_elements_by_tag_name("h1")] + assert "Enable FreeOTP" in h1s - # Retrieve the saved user object from the db and keep it around for - # further testing - self.new_user['orm_obj'] = Journalist.query.filter( - Journalist.username == self.new_user['username']).one() + shared_secret = ( + self.driver.find_element_by_css_selector("#shared-secret").text.strip().replace(" ", "") + ) + self.create_new_totp(shared_secret) # Verify the two-factor authentication - token_field = self.driver.find_element_by_css_selector( - 'input[name="token"]') - token_field.send_keys('mocked') - submit_button = self.driver.find_element_by_css_selector( - 'button[type=submit]') - 
submit_button.click() - - if not hasattr(self, 'accept_languages'): - # Successfully verifying the code should redirect to the admin - # interface, and flash a message indicating success - flashed_msgs = self.driver.find_elements_by_css_selector('.flash') - assert (("Token in two-factor authentication " - "accepted for user {}.").format( - self.new_user['username']) in - [el.text for el in flashed_msgs]) + self.safe_send_keys_by_css_selector('input[name="token"]', str(self.new_totp.now())) + self.safe_click_by_css_selector("button[type=submit]") + + def user_token_added(): + if not hasattr(self, "accept_languages"): + # Successfully verifying the code should redirect to the admin + # interface, and flash a message indicating success + flash_msg = self.driver.find_elements_by_css_selector(".flash") + assert "The two-factor code for user \"{user}\" was verified successfully.".format( + user=self.new_user["username"] + ) in [el.text for el in flash_msg] + + self.wait_for(user_token_added) + + def _admin_deletes_user(self): + for i in range(CLICK_ATTEMPTS): + try: + self.safe_click_by_css_selector(".delete-user") + self.alert_wait() + self.alert_accept() + break + except TimeoutException: + # Selenium has failed to click, and the confirmation + # alert didn't happen. Try once more. + logging.info("Selenium has failed to click yet again; retrying.") + + def user_deleted(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Deleted user" in flash_msg.text + + self.wait_for(user_deleted) def _admin_can_send_test_alert(self): - alert_button = self.driver.find_element_by_id('test-ossec-alert') + alert_button = self.driver.find_element_by_id("test-ossec-alert") alert_button.click() - if not hasattr(self, 'accept_languages'): - flashed_msg = self.driver.find_element_by_css_selector('.flash') - assert "Test alert sent. Check your email." in flashed_msg.text + def test_alert_sent(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Test alert sent. Please check your email." 
in flash_msg.text + + self.wait_for(test_alert_sent) - @screenshots def _logout(self): # Click the logout link - logout_link = self.driver.find_element_by_id('link-logout') - logout_link.click() + self.safe_click_by_id("link-logout") + self.wait_for(lambda: self.driver.find_element_by_css_selector(".login-form")) # Logging out should redirect back to the login page def login_page(): - assert ("Login to access the journalist interface" in - self.driver.page_source) + assert "Login to access the journalist interface" in self.driver.page_source + self.wait_for(login_page) - @screenshots def _check_login_with_otp(self, otp): self._logout() - self._login_user(self.new_user['username'], - self.new_user['password'], otp) - if not hasattr(self, 'accept_languages'): - # Test that the new user was logged in successfully - assert 'Sources' in self.driver.page_source + self._login_user(self.new_user["username"], self.new_user["password"], otp) + assert self._is_on_journalist_homepage() - @screenshots def _new_user_can_log_in(self): # Log the admin user out self._logout() + self.wait_for(lambda: self.driver.find_element_by_css_selector(".login-form")) # Log the new user in - self._login_user(self.new_user['username'], - self.new_user['password'], - 'mocked') + self._login_user(self.new_user["username"], self.new_user["password"], self.new_totp) - if not hasattr(self, 'accept_languages'): - # Test that the new user was logged in successfully - assert 'Sources' in self.driver.page_source + assert self._is_on_journalist_homepage() # The new user was not an admin, so they should not have the admin # interface link available with pytest.raises(NoSuchElementException): - self.driver.find_element_by_id('link-admin-index') + self.driver.find_element_by_id("link-admin-index") + + def _new_admin_user_can_log_in(self): + # Test login with mocked token + self._check_login_with_otp(self.new_totp) + + # Newly added user who is an admin can visit admin interface + self._admin_visits_admin_interface() - @screenshots def _edit_account(self): - edit_account_link = self.driver.find_element_by_id( - 'link-edit-account') + edit_account_link = self.driver.find_element_by_id("link-edit-account") edit_account_link.click() # The header says "Edit your account" - h1s = self.driver.find_elements_by_tag_name('h1')[0] - assert 'Edit your account' == h1s.text + def edit_page_loaded(): + h1s = self.driver.find_elements_by_tag_name("h1")[0] + assert "Edit your account" == h1s.text + + self.wait_for(edit_page_loaded) + # There's no link back to the admin interface. with pytest.raises(NoSuchElementException): - self.driver.find_element_by_partial_link_text( - 'Back to admin interface') + self.driver.find_element_by_partial_link_text("Back to admin interface") # There's no field to change your username. with pytest.raises(NoSuchElementException): - self.driver.find_element_by_css_selector('#username') - # There's no checkbox to change the administrator status of your + self.driver.find_element_by_css_selector("#username") + # There's no checkbox to change the admin status of your # account. with pytest.raises(NoSuchElementException): - self.driver.find_element_by_css_selector('#is-admin') + self.driver.find_element_by_css_selector("#is-admin") # 2FA reset buttons at the bottom point to the user URLs for reset. 
- totp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-totp')[0] - assert ('/account/reset-2fa-totp' in - totp_reset_button.get_attribute('action')) - hotp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-hotp')[0] - assert ('/account/reset-2fa-hotp' in - hotp_reset_button.get_attribute('action')) - - @screenshots - def _edit_user(self, username): - user = Journalist.query.filter_by(username=username).one() - - new_user_edit_links = filter( - lambda el: el.get_attribute('data-username') == username, - self.driver.find_elements_by_tag_name('a')) + totp_reset_button = self.driver.find_elements_by_css_selector("#reset-two-factor-totp")[0] + assert "/account/reset-2fa-totp" in totp_reset_button.get_attribute("action") + hotp_reset_button = self.driver.find_elements_by_css_selector("#reset-two-factor-hotp")[0] + assert "/account/reset-2fa-hotp" in hotp_reset_button.get_attribute("action") + + def _edit_user(self, username, is_admin=False): + self.wait_for(lambda: self.driver.find_element_by_id("users")) + + new_user_edit_links = [ + el + for el in self.driver.find_elements_by_tag_name("a") + if el.get_attribute("data-username") == username + ] + assert 1 == len(new_user_edit_links) new_user_edit_links[0].click() - # The header says "Edit user "username"". - h1s = self.driver.find_elements_by_tag_name('h1')[0] - assert 'Edit user "{}"'.format(username) == h1s.text + + def edit_user_page_loaded(): + h1s = self.driver.find_elements_by_tag_name("h1")[0] + assert 'Edit user "{}"'.format(username) == h1s.text + + self.wait_for(edit_user_page_loaded) + # There's a convenient link back to the admin interface. admin_interface_link = self.driver.find_element_by_partial_link_text( - 'Back to admin interface') - assert re.search('/admin$', admin_interface_link.get_attribute('href')) + "Back to admin interface" + ) + assert re.search("/admin$", admin_interface_link.get_attribute("href")) # There's a field to change the user's username and it's already filled # out with the user's username. - username_field = self.driver.find_element_by_css_selector('#username') - assert username_field.get_attribute('placeholder') == username - # There's a checkbox to change the administrator status of the user and + username_field = self.driver.find_element_by_css_selector("#username") + assert username_field.get_attribute("value") == username + # There's a checkbox to change the admin status of the user and # it's already checked appropriately to reflect the current status of # our user. - username_field = self.driver.find_element_by_css_selector('#is-admin') - assert (bool(username_field.get_attribute('checked')) == - user.is_admin) + username_field = self.driver.find_element_by_css_selector("#is-admin") + assert bool(username_field.get_attribute("checked")) == is_admin # 2FA reset buttons at the bottom point to the admin URLs for # resettting 2FA and include the correct user id in the hidden uid. 
- totp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-totp')[0] - assert '/admin/reset-2fa-totp' in totp_reset_button.get_attribute( - 'action') - totp_reset_uid = totp_reset_button.find_element_by_name('uid') - assert int(totp_reset_uid.get_attribute('value')) == user.id + totp_reset_button = self.driver.find_elements_by_css_selector("#reset-two-factor-totp")[0] + assert "/admin/reset-2fa-totp" in totp_reset_button.get_attribute("action") + totp_reset_uid = totp_reset_button.find_element_by_name("uid") assert totp_reset_uid.is_displayed() is False - hotp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-hotp')[0] - assert '/admin/reset-2fa-hotp' in hotp_reset_button.get_attribute( - 'action') + hotp_reset_button = self.driver.find_elements_by_css_selector("#reset-two-factor-hotp")[0] + assert "/admin/reset-2fa-hotp" in hotp_reset_button.get_attribute("action") - hotp_reset_uid = hotp_reset_button.find_element_by_name('uid') - assert int(hotp_reset_uid.get_attribute('value')) == user.id + hotp_reset_uid = hotp_reset_button.find_element_by_name("uid") assert hotp_reset_uid.is_displayed() is False - @screenshots def _admin_can_edit_new_user(self): # Log the new user out self._logout() - self._login_user(self.admin.username, self.admin_pw, 'mocked') + self.wait_for(lambda: self.driver.find_element_by_css_selector(".login-form")) + + self._login_user(self.admin, self.admin_pw, self.admin_user["totp"]) # Go to the admin interface - admin_interface_link = self.driver.find_element_by_id( - 'link-admin-index') - admin_interface_link.click() + self.safe_click_by_id("link-admin-index") + + self.wait_for(lambda: self.driver.find_element_by_css_selector("button#add-user")) # Click the "edit user" link for the new user # self._edit_user(self.new_user['username']) - new_user_edit_links = filter( - lambda el: (el.get_attribute('data-username') == - self.new_user['username']), - self.driver.find_elements_by_tag_name('a')) + new_user_edit_links = [ + el + for el in self.driver.find_elements_by_tag_name("a") + if (el.get_attribute("data-username") == self.new_user["username"]) + ] assert len(new_user_edit_links) == 1 new_user_edit_links[0].click() def can_edit_user(): - assert ('"{}"'.format(self.new_user['username']) in - self.driver.page_source) + h = self.driver.find_elements_by_tag_name("h1")[0] + assert 'Edit user "{}"'.format(self.new_user["username"]) == h.text + self.wait_for(can_edit_user) - new_username = self.new_user['username'] + "2" + new_characters = "2" + new_username = self.new_user["username"] + new_characters + + self.safe_send_keys_by_css_selector('input[name="username"]', new_characters) + self.safe_click_by_css_selector("button[type=submit]") + + def user_edited(): + if not hasattr(self, "accept_languages"): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Account updated." 
in flash_msg.text - username_field = self.driver.find_element_by_css_selector( - 'input[name="username"]') - username_field.send_keys(new_username) - update_user_btn = self.driver.find_element_by_css_selector( - 'button[type=submit]') - update_user_btn.click() + self.wait_for(user_edited) def can_edit_user2(): - assert ('"{}"'.format(new_username) in self.driver.page_source) + assert '"{}"'.format(new_username) in self.driver.page_source + self.wait_for(can_edit_user2) # Update self.new_user with the new username for the future tests - self.new_user['username'] = new_username + self.new_user["username"] = new_username # Log the new user in with their new username self._logout() - self._login_user(self.new_user['username'], - self.new_user['password'], - 'mocked') - if not hasattr(self, 'accept_languages'): - def found_sources(): - assert 'Sources' in self.driver.page_source - self.wait_for(found_sources) + + self.wait_for(lambda: self.driver.find_element_by_css_selector(".login-form")) + + self._login_user(self.new_user["username"], self.new_user["password"], self.new_totp) + + assert self._is_on_journalist_homepage() # Log the admin user back in self._logout() - self._login_user(self.admin.username, self.admin_pw, 'mocked') + + self.wait_for(lambda: self.driver.find_element_by_css_selector(".login-form")) + + self._login_user(self.admin, self.admin_pw, self.admin_user["totp"]) # Go to the admin interface - admin_interface_link = self.driver.find_element_by_id( - 'link-admin-index') - admin_interface_link.click() - - # Edit the new user's password - self._edit_user(self.new_user['username']) - new_password = self.driver.find_element_by_css_selector('#password') \ - .text.strip() - self.new_user['password'] = new_password - - reset_pw_btn = self.driver.find_element_by_css_selector( - '#reset-password') + self.safe_click_by_id("link-admin-index") + + self.wait_for(lambda: self.driver.find_element_by_css_selector("button#add-user")) + + selector = 'a[data-username="{}"]'.format(self.new_user["username"]) + new_user_edit_links = self.driver.find_elements_by_css_selector(selector) + assert len(new_user_edit_links) == 1 + self.safe_click_by_css_selector(selector) + + self.wait_for(can_edit_user) + + new_password = self.driver.find_element_by_css_selector("#password").text.strip() + self.new_user["password"] = new_password + + reset_pw_btn = self.driver.find_element_by_css_selector("#reset-password") reset_pw_btn.click() def update_password_success(): - assert 'Password updated.' in self.driver.page_source + assert "Password updated." 
in self.driver.page_source # Wait until page refreshes to avoid causing a broken pipe error (#623) self.wait_for(update_password_success) # Log the new user in with their new password self._logout() - self._login_user(self.new_user['username'], - self.new_user['password'], - 'mocked') - self.wait_for(found_sources) + self._login_user(self.new_user["username"], self.new_user["password"], self.new_totp) + + assert self._is_on_journalist_homepage() - @screenshots def _journalist_checks_messages(self): self.driver.get(self.journalist_location) # There should be 1 collection in the list of collections - code_names = self.driver.find_elements_by_class_name('code-name') - assert 1 == len(code_names) + code_names = self.driver.find_elements_by_class_name("code-name") + assert 0 != len(code_names), code_names + assert 1 <= len(code_names), code_names - if not hasattr(self, 'accept_languages'): + if not hasattr(self, "accept_languages"): # There should be a "1 unread" span in the sole collection entry - unread_span = self.driver.find_element_by_css_selector( - 'span.unread') + unread_span = self.driver.find_element_by_css_selector("span.unread") assert "1 unread" in unread_span.text - @screenshots def _journalist_stars_and_unstars_single_message(self): # Message begins unstarred with pytest.raises(NoSuchElementException): - self.driver.find_element_by_id('starred-source-link-1') + self.driver.find_element_by_id("starred-source-link-1") # Journalist stars the message - self.driver.find_element_by_class_name('button-star').click() - starred = self.driver.find_elements_by_id('starred-source-link-1') - assert 1 == len(starred) + self.driver.find_element_by_class_name("button-star").click() + + def message_starred(): + starred = self.driver.find_elements_by_id("starred-source-link-1") + assert 1 == len(starred) + + self.wait_for(message_starred) # Journalist unstars the message - self.driver.find_element_by_class_name('button-star').click() - with pytest.raises(NoSuchElementException): - self.driver.find_element_by_id('starred-source-link-1') + self.driver.find_element_by_class_name("button-star").click() + + def message_unstarred(): + with pytest.raises(NoSuchElementException): + self.driver.find_element_by_id("starred-source-link-1") + + self.wait_for(message_unstarred) - @screenshots def _journalist_selects_all_sources_then_selects_none(self): - self.driver.find_element_by_id('select_all').click() - checkboxes = self.driver.find_elements_by_id('checkbox') + self.driver.find_element_by_id("select_all").click() + checkboxes = self.driver.find_elements_by_id("checkbox") for checkbox in checkboxes: assert checkbox.is_selected() - self.driver.find_element_by_id('select_none').click() - checkboxes = self.driver.find_elements_by_id('checkbox') + self.driver.find_element_by_id("select_none").click() + checkboxes = self.driver.find_elements_by_id("checkbox") for checkbox in checkboxes: assert checkbox.is_selected() is False def _journalist_selects_the_first_source(self): - self.driver.find_element_by_css_selector( - '#un-starred-source-link-1').click() + self.driver.find_element_by_css_selector("#un-starred-source-link-1").click() - def _journalist_selects_documents_to_download(self): - self.driver.find_element_by_id('select_all').click() + def _journalist_selects_all_documents(self): + checkboxes = self.driver.find_elements_by_name("doc_names_selected") + for checkbox in checkboxes: + checkbox.click() - @screenshots def _journalist_downloads_message(self): self._journalist_selects_the_first_source() - 
submissions = self.driver.find_elements_by_css_selector( - '#submissions a') + self.wait_for(lambda: self.driver.find_element_by_css_selector("ul#submissions")) + + submissions = self.driver.find_elements_by_css_selector("#submissions a") assert 1 == len(submissions) - file_url = submissions[0].get_attribute('href') + file_url = submissions[0].get_attribute("href") # Downloading files with Selenium is tricky because it cannot automate # the browser's file download dialog. We can directly request the file - # using urllib2, but we need to pass the cookies for the logged in user + # using requests, but we need to pass the cookies for the logged-in user # for Flask to allow this. def cookie_string_from_selenium_cookies(cookies): - cookie_strs = [] + result = {} for cookie in cookies: - cookie_str = "=".join([cookie['name'], cookie['value']]) + ';' - cookie_strs.append(cookie_str) - return ' '.join(cookie_strs) + result[cookie["name"]] = cookie["value"] + return result - submission_req = urllib2.Request(file_url) - submission_req.add_header( - 'Cookie', - cookie_string_from_selenium_cookies( - self.driver.get_cookies())) - raw_content = urllib2.urlopen(submission_req).read() + cks = cookie_string_from_selenium_cookies(self.driver.get_cookies()) + raw_content = self.return_downloaded_content(file_url, cks) decrypted_submission = self.gpg.decrypt(raw_content) - submission = self._get_submission_content(file_url, - decrypted_submission) + submission = self._get_submission_content(file_url, decrypted_submission) + if type(submission) == bytes: + submission = submission.decode("utf-8") + + assert self.secret_message == submission def _journalist_composes_reply(self): - reply_text = ('Thanks for the documents. Can you submit more ' - 'information about the main program?') - self.wait_for(lambda: self.driver.find_element_by_id( - 'reply-text-field' - ), timeout=60) - self.driver.find_element_by_id('reply-text-field').send_keys( - reply_text + reply_text = ( + "Thanks for the documents. Can you submit more information about the main program?" ) + self.wait_for(lambda: self.driver.find_element_by_id("reply-text-field")) + self.safe_send_keys_by_id("reply-text-field", reply_text) def _journalist_sends_reply_to_source(self): self._journalist_composes_reply() - self.driver.find_element_by_id('reply-button').click() + self.driver.find_element_by_id("reply-button").click() - if not hasattr(self, 'accept_languages'): - assert ("Thanks. Your reply has been stored." in - self.driver.page_source) + def reply_stored(): + if not hasattr(self, "accept_languages"): + assert "Thanks. Your reply has been stored."
in self.driver.page_source - def _visit_edit_account(self): - edit_account_link = self.driver.find_element_by_id( - 'link-edit-account') - edit_account_link.click() + self.wait_for(reply_stored) - def _visit_edit_secret(self, type): - reset_form = self.driver.find_elements_by_css_selector( - '#reset-two-factor-' + type)[0] - assert ('/account/reset-2fa-' + type in - reset_form.get_attribute('action')) + def _visit_edit_account(self): + self.safe_click_by_id("link-edit-account") + def _visit_edit_secret(self, otp_type, tooltip_text=''): + reset_form = self.wait_for( + lambda: self.driver.find_element_by_id("reset-two-factor-" + otp_type) + ) + assert "/account/reset-2fa-" + otp_type in reset_form.get_attribute("action") reset_button = self.driver.find_elements_by_css_selector( - '#button-reset-two-factor-' + type)[0] - reset_button.click() - - def _visit_edit_hotp_secret(self): - self._visit_edit_secret('hotp') + "#button-reset-two-factor-" + otp_type)[0] + + # 2FA reset buttons show a tooltip with explanatory text on hover. + # Also, confirm the text on the tooltip is the correct one. + reset_button.location_once_scrolled_into_view + ActionChains(self.driver).move_to_element(reset_button).perform() + time.sleep(1) + explanatory_tooltip_opacity = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-" + otp_type + " span")[0].value_of_css_property("opacity") + explanatory_tooltip_content = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-" + otp_type + " span")[0].text + + assert explanatory_tooltip_opacity == "1" + if not hasattr(self, "accept_languages"): + assert explanatory_tooltip_content == tooltip_text + reset_form.submit() + + alert = self.driver.switch_to_alert() + alert.accept() def _set_hotp_secret(self): - hotp_secret_field = self.driver.find_elements_by_css_selector( - 'input[name="otp_secret"]')[0] - hotp_secret_field.send_keys('123456') - submit_button = self.driver.find_element_by_css_selector( - 'button[type=submit]') - submit_button.click() + self.safe_send_keys_by_css_selector('input[name="otp_secret"]', "123456") + self.safe_click_by_css_selector("button[type=submit]") + + def _visit_edit_hotp_secret(self): + self._visit_edit_secret( + "hotp", + "Reset two-factor authentication for security keys like Yubikey") def _visit_edit_totp_secret(self): - self._visit_edit_secret('totp') + self._visit_edit_secret( + "totp", + "Reset two-factor authentication for mobile apps such as FreeOTP or " + "Google Authenticator" + ) def _admin_visits_add_user(self): - add_user_btn = self.driver.find_element_by_css_selector( - 'button#add-user') + add_user_btn = self.driver.find_element_by_css_selector("button#add-user") + self.wait_for(lambda: add_user_btn.is_enabled() and add_user_btn.is_displayed()) add_user_btn.click() + self.wait_for(lambda: self.driver.find_element_by_id("username")) + def _admin_visits_edit_user(self): - new_user_edit_links = filter( - lambda el: (el.get_attribute('data-username') == - self.new_user['username']), - self.driver.find_elements_by_tag_name('a')) + selector = 'a[data-username="{}"]'.format(self.new_user["username"]) + new_user_edit_links = self.driver.find_elements_by_css_selector(selector) assert len(new_user_edit_links) == 1 - new_user_edit_links[0].click() - - def can_edit_user(): - assert ('"{}"'.format(self.new_user['username']) in - self.driver.page_source) - self.wait_for(can_edit_user) + self.safe_click_by_css_selector(selector) + try: + self.wait_for(lambda: self.driver.find_element_by_id("new-password")) + 
except NoSuchElementException: + # try once more + self.safe_click_by_css_selector(selector) + self.wait_for(lambda: self.driver.find_element_by_id("new-password")) + + def retry_2fa_pop_ups(self, navigation_step, button_to_click): + """Clicking on Selenium alerts can be flaky. We need to retry them if they timeout.""" + + for i in range(CLICK_ATTEMPTS): + try: + try: + # This is the button we click to trigger the alert. + self.wait_for(lambda: self.driver.find_elements_by_id( + button_to_click)[0]) + except IndexError: + # If the button isn't there, then the alert is up from the last + # time we attempted to run this test. Switch to it and accept it. + self.alert_wait() + self.alert_accept() + break + + # The alert isn't up. Run the rest of the logic. + navigation_step() + + self.alert_wait() + self.alert_accept() + break + except TimeoutException: + # Selenium has failed to click, and the confirmation + # alert didn't happen. We'll try again. + logging.info("Selenium has failed to click; retrying.") def _admin_visits_reset_2fa_hotp(self): - hotp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-hotp')[0] - assert ('/admin/reset-2fa-hotp' in - hotp_reset_button.get_attribute('action')) - hotp_reset_button.click() + def _admin_visits_reset_2fa_hotp_step(): + # 2FA reset buttons show a tooltip with explanatory text on hover. + # Also, confirm the text on the tooltip is the correct one. + hotp_reset_button = self.driver.find_elements_by_id( + "reset-two-factor-hotp")[0] + hotp_reset_button.location_once_scrolled_into_view + ActionChains(self.driver).move_to_element(hotp_reset_button).perform() - def _admin_visits_reset_2fa_totp(self): - totp_reset_button = self.driver.find_elements_by_css_selector( - '#reset-two-factor-totp')[0] - assert ('/admin/reset-2fa-totp' in - totp_reset_button.get_attribute('action')) - totp_reset_button.click() + time.sleep(1) - def _admin_creates_a_user(self, hotp): - add_user_btn = self.driver.find_element_by_css_selector( - 'button#add-user') - add_user_btn.click() + tip_opacity = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-hotp span")[0].value_of_css_property('opacity') + tip_text = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-hotp span")[0].text + + assert tip_opacity == "1" + + if not hasattr(self, "accept_languages"): + assert tip_text == "Reset two-factor authentication for security keys like Yubikey" - self.new_user = dict( - username='dellsberg', - password='pentagonpapers') + self.safe_click_by_id("button-reset-two-factor-hotp") - self._add_user(self.new_user['username'], + # Run the above step in a retry loop + self.retry_2fa_pop_ups(_admin_visits_reset_2fa_hotp_step, "reset-two-factor-hotp") + + def _admin_visits_edit_hotp(self): + self.wait_for(lambda: self.driver.find_element_by_css_selector('input[name="otp_secret"]')) + + def _admin_visits_reset_2fa_totp(self): + def _admin_visits_reset_2fa_totp_step(): + totp_reset_button = self.driver.find_elements_by_id("reset-two-factor-totp")[0] + assert "/admin/reset-2fa-totp" in totp_reset_button.get_attribute("action") + # 2FA reset buttons show a tooltip with explanatory text on hover. + # Also, confirm the text on the tooltip is the correct one. 
+ totp_reset_button = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-totp")[0] + totp_reset_button.location_once_scrolled_into_view + ActionChains(self.driver).move_to_element(totp_reset_button).perform() + + time.sleep(1) + + tip_opacity = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-totp span")[0].value_of_css_property('opacity') + tip_text = self.driver.find_elements_by_css_selector( + "#button-reset-two-factor-totp span")[0].text + + assert tip_opacity == "1" + if not hasattr(self, "accept_languages"): + expected_text = ( + "Reset two-factor authentication for mobile apps such as FreeOTP " + "or Google Authenticator" + ) + assert tip_text == expected_text + + self.safe_click_by_id("button-reset-two-factor-totp") + + # Run the above step in a retry loop + self.retry_2fa_pop_ups(_admin_visits_reset_2fa_totp_step, "reset-two-factor-totp") + + def _admin_creates_a_user(self, hotp): + self.safe_click_by_id("add-user") + self.wait_for(lambda: self.driver.find_element_by_id("username")) + self.new_user = dict(username="dellsberg", + first_name='', + last_name='', + password="pentagonpapers") + self._add_user(self.new_user["username"], + first_name=self.new_user['first_name'], + last_name=self.new_user['last_name'], is_admin=False, hotp=hotp) def _journalist_delete_all(self): - for checkbox in self.driver.find_elements_by_name( - 'doc_names_selected'): + for checkbox in self.driver.find_elements_by_name("doc_names_selected"): checkbox.click() - self.driver.find_element_by_id('delete-selected').click() - def _journalist_confirm_delete_all(self): + delete_selected_link = self.driver.find_element_by_id("delete-selected-link") + ActionChains(self.driver).move_to_element(delete_selected_link).click().perform() + + def _journalist_confirm_delete_selected(self): self.wait_for( - lambda: self.driver.find_element_by_id('confirm-delete')) - confirm_btn = self.driver.find_element_by_id('confirm-delete') - confirm_btn.click() + lambda: expected_conditions.element_to_be_clickable((By.ID, "delete-selected")) + ) + confirm_btn = self.driver.find_element_by_id("delete-selected") + confirm_btn.location_once_scrolled_into_view + ActionChains(self.driver).move_to_element(confirm_btn).click().perform() def _source_delete_key(self): - filesystem_id = crypto_util.hash_codename(self.source_name) - crypto_util.delete_reply_keypair(filesystem_id) + filesystem_id = self.source_app.crypto_util.hash_codename(self.source_name) + self.source_app.crypto_util.delete_reply_keypair(filesystem_id) def _journalist_continues_after_flagging(self): - self.driver.find_element_by_id('continue-to-list').click() + self.wait_for(lambda: self.driver.find_element_by_id("continue-to-list")) + continue_link = self.driver.find_element_by_id("continue-to-list") + + actions = ActionChains(self.driver) + actions.move_to_element(continue_link).perform() + continue_link.click() def _journalist_delete_none(self): - self.driver.find_element_by_id('delete-selected').click() + self.driver.find_element_by_id("delete-selected-link").click() - def _journalist_delete_all_javascript(self): - self.driver.find_element_by_id('select_all').click() - self.driver.find_element_by_id('delete-selected').click() - self._alert_wait() + def _journalist_delete_all_confirmation(self): + self.safe_click_all_by_css_selector("[name=doc_names_selected]") + self.safe_click_by_css_selector("a#delete-selected-link > button.danger") def _journalist_delete_one(self): - 
self.driver.find_elements_by_name('doc_names_selected')[0].click() - self.driver.find_element_by_id('delete-selected').click() + self.safe_click_by_css_selector("[name=doc_names_selected]") + + el = WebDriverWait(self.driver, self.timeout, self.poll_frequency).until( + expected_conditions.element_to_be_clickable((By.ID, "delete-selected-link")) + ) + el.location_once_scrolled_into_view + ActionChains(self.driver).move_to_element(el).click().perform() def _journalist_flags_source(self): - self.driver.find_element_by_id('flag-button').click() + self.safe_click_by_id("flag-button") def _journalist_visits_admin(self): self.driver.get(self.journalist_location + "/admin") def _journalist_fail_login(self): - self.user, self.user_pw = db_helper.init_journalist() - self._try_login_user(self.user.username, 'worse', 'mocked') + self._try_login_user("root", "worse", "mocked") def _journalist_fail_login_many(self): - self.user, self.user_pw = db_helper.init_journalist() - for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD + 1): - self._try_login_user(self.user.username, 'worse', 'mocked') - - def _admin_enters_journalist_account_details_hotp(self, username, - hotp_secret): - username_field = self.driver.find_element_by_css_selector( - 'input[name="username"]') - username_field.send_keys(username) - - hotp_secret_field = self.driver.find_element_by_css_selector( - 'input[name="otp_secret"]') - hotp_secret_field.send_keys(hotp_secret) - - hotp_checkbox = self.driver.find_element_by_css_selector( - 'input[name="is_hotp"]') - hotp_checkbox.click() + self.user = "" + for _ in range(5 + 1): + self._try_login_user(self.user, "worse", "mocked") + + def _admin_enters_journalist_account_details_hotp(self, username, hotp_secret): + self.safe_send_keys_by_css_selector('input[name="username"]', username) + self.safe_send_keys_by_css_selector('input[name="otp_secret"]', hotp_secret) + self.safe_click_by_css_selector('input[name="is_hotp"]') + + def _journalist_uses_js_filter_by_sources(self): + filter_box = self.safe_send_keys_by_id("filter", "thiswordisnotinthewordlist") + sources = self.driver.find_elements_by_class_name("code-name") + assert len(sources) > 0 + for source in sources: + assert source.is_displayed() is False + filter_box.clear() + filter_box.send_keys(Keys.RETURN) + + for source in sources: + assert source.is_displayed() is True + + def _journalist_source_selection_honors_filter(self): + """Check that select all/none honors the filter in effect.""" + + self.wait_for(lambda: self.driver.find_element_by_id("filter"), 60) + + # make sure the list is not filtered + filter_box = self.driver.find_element_by_id("filter") + filter_box.clear() + filter_box.send_keys(Keys.RETURN) + + # get the journalist designation of the first source + sources = self.driver.find_elements_by_class_name("code-name") + assert len(sources) > 0 + first_source_designation = sources[0].text + + # filter the source list so only the first is visible + filter_box.send_keys(first_source_designation) + for source in sources: + assert source.text == first_source_designation or source.is_displayed() is False + + # clicking "select all" should only select the visible source + select_all = self.driver.find_element_by_id("select_all") + select_all.click() + + source_rows = self.driver.find_elements_by_css_selector("#cols li.source") + for source_row in source_rows: + source_designation = source_row.get_attribute("data-source-designation") + checkbox = source_row.find_element_by_css_selector("input[type=checkbox]") + if 
source_designation == first_source_designation: + assert checkbox.is_selected() + else: + assert not checkbox.is_selected() + + # clear the filter + filter_box.clear() + filter_box.send_keys(Keys.RETURN) + + # select all sources + select_all.click() + for source_row in source_rows: + checkbox = source_row.find_element_by_css_selector("input[type=checkbox]") + assert checkbox.is_selected() + + # now filter again + filter_box.send_keys(first_source_designation) + + # clicking "select none" should only deselect the visible source + select_none = self.driver.find_element_by_id("select_none") + select_none.click() + for source_row in source_rows: + source_designation = source_row.get_attribute("data-source-designation") + checkbox = source_row.find_element_by_css_selector("input[type=checkbox]") + if source_designation == first_source_designation: + assert not checkbox.is_selected() + else: + assert checkbox.is_selected() + + # clear the filter and leave none selected + filter_box.clear() + filter_box.send_keys(Keys.RETURN) + select_none.click() + + for source_row in source_rows: + assert source_row.is_displayed() + checkbox = source_row.find_element_by_css_selector("input[type=checkbox]") + assert not checkbox.is_selected() + + def _journalist_uses_js_buttons_to_download_unread(self): + self.driver.find_element_by_id("select_all").click() + checkboxes = self.driver.find_elements_by_name("doc_names_selected") + assert len(checkboxes) > 0 + for checkbox in checkboxes: + assert checkbox.is_selected() + + self.driver.find_element_by_id("select_none").click() + checkboxes = self.driver.find_elements_by_name("doc_names_selected") + for checkbox in checkboxes: + assert checkbox.is_selected() is False + + self.driver.find_element_by_id("select_unread").click() + checkboxes = self.driver.find_elements_by_name("doc_names_selected") + for checkbox in checkboxes: + classes = checkbox.get_attribute("class") + assert "unread-cb" in classes diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -1,229 +1,198 @@ import tempfile import time +import json -from selenium.webdriver.common.action_chains import ActionChains -from step_helpers import screenshots +class SourceNavigationStepsMixin: + def _is_on_source_homepage(self): + return self.wait_for(lambda: self.driver.find_element_by_id("source-index")) -class SourceNavigationStepsMixin(): + def _is_logged_in(self): + return self.wait_for(lambda: self.driver.find_element_by_id("logout")) + + def _is_on_lookup_page(self): + return self.wait_for(lambda: self.driver.find_element_by_id("upload")) + + def _is_on_generate_page(self): + return self.wait_for(lambda: self.driver.find_element_by_id("create-form")) - @screenshots def _source_visits_source_homepage(self): self.driver.get(self.source_location) + assert self._is_on_source_homepage() - if not hasattr(self, 'accept_languages'): - assert ("SecureDrop | Protecting Journalists and Sources" == - self.driver.title) + def _source_checks_instance_metadata(self): + self.driver.get(self.source_location + "/metadata") + j = json.loads(self.driver.find_element_by_tag_name("body").text) + assert j["server_os"] == "16.04" + assert j["sd_version"] == self.source_app.jinja_env.globals["version"] + assert j["gpg_fpr"] != "" def _source_clicks_submit_documents_on_homepage(self): - # First move the cursor to a known position in case it 
happens to - # be hovering over one of the buttons we are testing below. - header_image = self.driver.find_element_by_css_selector('.header') - ActionChains(self.driver).move_to_element(header_image).perform() # It's the source's first time visiting this SecureDrop site, so they # choose to "Submit Documents". - submit_button = self.driver.find_element_by_id( - 'submit-documents-button') - - submit_button_icon = self.driver.find_element_by_css_selector( - 'a#submit-documents-button > img.off-hover') - assert submit_button_icon.is_displayed() + self.safe_click_by_id("submit-documents-button") - # The source hovers their cursor over the button, and the visual style - # of the button changes to encourage them to click it. - ActionChains(self.driver).move_to_element(submit_button).perform() - - # Let's make sure toggling the icon image with the hover state - # is working. - assert submit_button_icon.is_displayed() is False - submit_button_hover_icon = self.driver.find_element_by_css_selector( - 'a#submit-documents-button > img.on-hover') - assert submit_button_hover_icon.is_displayed() - - # The source clicks the submit button. - submit_button.click() + # The source should now be on the page where they are presented with + # a diceware codename they can use for subsequent logins + assert self._is_on_generate_page() - @screenshots def _source_chooses_to_submit_documents(self): self._source_clicks_submit_documents_on_homepage() - codename = self.driver.find_element_by_css_selector('#codename') + codename = self.driver.find_element_by_css_selector("#codename") assert len(codename.text) > 0 self.source_name = codename.text def _source_shows_codename(self): - content = self.driver.find_element_by_id('codename-hint-content') + content = self.driver.find_element_by_id("codename-hint-content") assert not content.is_displayed() - self.driver.find_element_by_id('codename-hint-show').click() + + self.safe_click_by_id("codename-hint-show") + + self.wait_for(lambda: content.is_displayed()) assert content.is_displayed() - content_content = self.driver.find_element_by_css_selector( - '#codename-hint-content p') + content_content = self.driver.find_element_by_css_selector("#codename-hint-content p") assert content_content.text == self.source_name def _source_hides_codename(self): - content = self.driver.find_element_by_id('codename-hint-content') + content = self.driver.find_element_by_id("codename-hint-content") assert content.is_displayed() - self.driver.find_element_by_id('codename-hint-hide').click() + + self.safe_click_by_id("codename-hint-hide") + + self.wait_for(lambda: not content.is_displayed()) assert not content.is_displayed() def _source_sees_no_codename(self): - codename = self.driver.find_elements_by_css_selector('.code-reminder') + codename = self.driver.find_elements_by_css_selector(".code-reminder") assert len(codename) == 0 - @screenshots def _source_chooses_to_login(self): - self.driver.find_element_by_id('login-button').click() - - logins = self.driver.find_elements_by_id( - 'login-with-existing-codename') + self.driver.find_element_by_id("login-button").click() - assert len(logins) > 0 + self.wait_for(lambda: self.driver.find_elements_by_id("login-with-existing-codename")) - @screenshots def _source_hits_cancel_at_login_page(self): - self.driver.find_element_by_id('cancel').click() + self.driver.find_element_by_id("cancel").click() self.driver.get(self.source_location) - if not hasattr(self, 'accept_languages'): - assert ("SecureDrop | Protecting Journalists and Sources" == - 
self.driver.title) + assert self._is_on_source_homepage() - @screenshots def _source_proceeds_to_login(self): - codename_input = self.driver.find_element_by_id( - 'login-with-existing-codename') - codename_input.send_keys(self.source_name) - - continue_button = self.driver.find_element_by_id('login') - continue_button.click() + self.safe_send_keys_by_id("login-with-existing-codename", self.source_name) + self.safe_click_by_id("login") - if not hasattr(self, 'accept_languages'): - assert ("SecureDrop | Protecting Journalists and Sources" == - self.driver.title) # Check that we've logged in + assert self._is_logged_in() replies = self.driver.find_elements_by_id("replies") assert len(replies) == 1 def _source_enters_codename_in_login_form(self): - codename_input = self.driver.find_element_by_id( - 'login-with-existing-codename') - codename_input.send_keys('ascension hypertext concert synopses') + self.safe_send_keys_by_id( + "login-with-existing-codename", "ascension hypertext concert synopses" + ) - @screenshots def _source_hits_cancel_at_submit_page(self): - self.driver.find_element_by_id('cancel').click() + self.driver.find_element_by_id("cancel").click() - if not hasattr(self, 'accept_languages'): - headline = self.driver.find_element_by_class_name('headline') - assert 'Submit Materials' == headline.text + if not hasattr(self, "accept_languages"): + headline = self.driver.find_element_by_class_name("headline") + assert "Submit Files or Messages" == headline.text - @screenshots def _source_continues_to_submit_page(self): - continue_button = self.driver.find_element_by_id('continue-button') - - continue_button_icon = self.driver.find_element_by_css_selector( - 'button#continue-button > img.off-hover') - assert continue_button_icon.is_displayed() - - # Hover over the continue button test toggle the icon images - # with the hover state. 
- ActionChains(self.driver).move_to_element(continue_button).perform() - assert continue_button_icon.is_displayed() is False + self.safe_click_by_id("continue-button") - continue_button_hover_icon = self.driver.find_element_by_css_selector( - 'button#continue-button img.on-hover' - ) - assert continue_button_hover_icon.is_displayed() - - continue_button.click() + def submit_page_loaded(): + if not hasattr(self, "accept_languages"): + headline = self.driver.find_element_by_class_name("headline") + assert "Submit Files or Messages" == headline.text - if not hasattr(self, 'accept_languages'): - headline = self.driver.find_element_by_class_name('headline') - assert 'Submit Materials' == headline.text + self.wait_for(submit_page_loaded) - @screenshots def _source_submits_a_file(self): with tempfile.NamedTemporaryFile() as file: - file.write(self.secret_message) + file.write(self.secret_message.encode("utf-8")) file.seek(0) filename = file.name - file_upload_box = self.driver.find_element_by_css_selector( - '[name=fh]') - file_upload_box.send_keys(filename) + self.safe_send_keys_by_css_selector("[name=fh]", filename) - submit_button = self.driver.find_element_by_id('submit-doc-button') - ActionChains(self.driver).move_to_element(submit_button).perform() + self.safe_click_by_id("submit-doc-button") + self.wait_for_source_key(self.source_name) - toggled_submit_button_icon = ( - self.driver.find_element_by_css_selector( - 'button#submit-doc-button img.on-hover')) - assert toggled_submit_button_icon.is_displayed() + def file_submitted(): + if not hasattr(self, "accept_languages"): + notification = self.driver.find_element_by_css_selector(".success") + expected_notification = "Thank you for sending this information to us" + assert expected_notification in notification.text - submit_button.click() - self.wait_for_source_key(self.source_name) + # Allow extra time for file uploads + self.wait_for(file_submitted, timeout=(self.timeout * 3)) - if not hasattr(self, 'accept_languages'): - notification = self.driver.find_element_by_css_selector( - '.success') - expected_notification = ( - 'Thank you for sending this information to us') - assert expected_notification in notification.text + # allow time for reply key to be generated + time.sleep(self.timeout) - @screenshots def _source_submits_a_message(self): self._source_enters_text_in_message_field() self._source_clicks_submit_button_on_submission_page() - if not hasattr(self, 'accept_languages'): - notification = self.driver.find_element_by_css_selector( - '.success') - assert 'Thank' in notification.text + def message_submitted(): + if not hasattr(self, "accept_languages"): + notification = self.driver.find_element_by_css_selector(".success") + assert "Thank" in notification.text + + self.wait_for(message_submitted) + + # allow time for reply key to be generated + time.sleep(self.timeout) def _source_enters_text_in_message_field(self): - text_box = self.driver.find_element_by_css_selector('[name=msg]') - text_box.send_keys(self.secret_message) + self.safe_send_keys_by_css_selector("[name=msg]", self.secret_message) def _source_clicks_submit_button_on_submission_page(self): - submit_button = self.driver.find_element_by_id('submit-doc-button') + submit_button = self.driver.find_element_by_id("submit-doc-button") submit_button.click() - self.wait_for_source_key(self.source_name) - @screenshots def _source_deletes_a_journalist_reply(self): # Get the reply filename so we can use IDs to select the delete buttons - reply_filename_element = 
self.driver.find_element_by_name( - 'reply_filename') - reply_filename = reply_filename_element.get_attribute('value') + reply_filename_element = self.driver.find_element_by_name("reply_filename") + reply_filename = reply_filename_element.get_attribute("value") - delete_button_id = 'delete-reply-{}'.format(reply_filename) + delete_button_id = "delete-reply-{}".format(reply_filename) delete_button = self.driver.find_element_by_id(delete_button_id) delete_button.click() - confirm_button_id = 'confirm-delete-reply-button-{}'.format( - reply_filename) + def confirm_displayed(): + confirm_button_id = "confirm-delete-reply-button-{}".format(reply_filename) + confirm_button = self.driver.find_element_by_id(confirm_button_id) + confirm_button.location_once_scrolled_into_view + assert confirm_button.is_displayed() + + self.wait_for(confirm_displayed) + + confirm_button_id = "confirm-delete-reply-button-{}".format(reply_filename) confirm_button = self.driver.find_element_by_id(confirm_button_id) - assert confirm_button.is_displayed() confirm_button.click() - if not hasattr(self, 'accept_languages'): - notification = self.driver.find_element_by_class_name( - 'notification') - assert 'Reply deleted' in notification.text + def reply_deleted(): + if not hasattr(self, "accept_languages"): + notification = self.driver.find_element_by_class_name("notification") + assert "Reply deleted" in notification.text + + self.wait_for(reply_deleted) - @screenshots def _source_logs_out(self): - self.driver.find_element_by_id('logout').click() - assert self.driver.find_element_by_css_selector('.important') + self.safe_click_by_id("logout") + self.wait_for(lambda: ("Submit for the first time" in self.driver.page_source)) def _source_not_found(self): self.driver.get(self.source_location + "/unlikely") - message = self.driver.find_element_by_id('page-not-found') + message = self.driver.find_element_by_id("page-not-found") assert message.is_displayed() def _source_visits_use_tor(self): @@ -235,12 +204,12 @@ def _source_tor2web_warning(self): def _source_why_journalist_key(self): self.driver.get(self.source_location + "/why-journalist-key") - def _source_waits_for_session_to_timeout(self, session_length_minutes): - time.sleep(session_length_minutes * 60 + 0.1) + def _source_waits_for_session_to_timeout(self): + time.sleep(self.session_expiration + 2) def _source_sees_session_timeout_message(self): - notification = self.driver.find_element_by_css_selector('.important') + notification = self.driver.find_element_by_css_selector(".important") - if not hasattr(self, 'accept_languages'): - expected_text = 'Your session timed out due to inactivity.' + if not hasattr(self, "accept_languages"): + expected_text = "Your session timed out due to inactivity." 
assert expected_text in notification.text diff --git a/securedrop/tests/functional/step_helpers.py b/securedrop/tests/functional/step_helpers.py deleted file mode 100644 --- a/securedrop/tests/functional/step_helpers.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from os.path import abspath, dirname, join, realpath -import inspect -import traceback - -LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log')) -screenshots_enabled = os.environ.get('SCREENSHOTS_ENABLED') - - -# screenshots is a decorator that records an image before and after -# the steps described in this file -def screenshots(f): - def wrapper(*args, **kwargs): - curframe = inspect.currentframe() - calframe = inspect.getouterframes(curframe, 2) - - locals = calframe[1][0].f_locals - if "testfunction" in locals: - fun = calframe[1][0].f_locals["testfunction"] - class_name = fun.__self__.__class__.__name__ - else: - class_name = calframe[1][0].f_locals["self"].__class__.__name__ - - stack = [x for x in traceback.extract_stack() - if '/tests/functional' in x[0]] - path = ('-'.join([stack[0][0].split('/')[-1], class_name] + - [x[2] for x in stack if x[2] is not 'wrapper'])) - if screenshots_enabled: - image_path = join(LOG_DIR, '%s-before.png' % path) - args[0].driver.save_screenshot(image_path) - result = f(*args, **kwargs) - if screenshots_enabled: - image_path = join(LOG_DIR, '%s-after.png' % path) - args[0].driver.save_screenshot(image_path) - return result - return wrapper diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py --- a/securedrop/tests/functional/test_admin_interface.py +++ b/securedrop/tests/functional/test_admin_interface.py @@ -1,13 +1,11 @@ -import functional_test -import journalist_navigation_steps -from step_helpers import screenshots +from . import functional_test as ft +from . 
import journalist_navigation_steps class TestAdminInterface( - functional_test.FunctionalTest, + ft.FunctionalTest, journalist_navigation_steps.JournalistNavigationStepsMixin): - @screenshots def test_admin_interface(self): self._admin_logs_in() self._admin_visits_admin_interface() @@ -15,6 +13,35 @@ def test_admin_interface(self): self._new_user_can_log_in() self._admin_can_edit_new_user() + def test_admin_edits_hotp_secret(self): + # Toggle security slider to force prefs change + self.set_tbb_securitylevel(ft.TBB_SECURITY_HIGH) + self.set_tbb_securitylevel(ft.TBB_SECURITY_LOW) + + self._admin_logs_in() + self._admin_visits_admin_interface() + self._admin_adds_a_user() + self._admin_visits_edit_user() + self._admin_visits_reset_2fa_hotp() + self._admin_visits_edit_hotp() + + def test_admin_edits_totp_secret(self): + # Toggle security slider to force prefs change + self.set_tbb_securitylevel(ft.TBB_SECURITY_HIGH) + self.set_tbb_securitylevel(ft.TBB_SECURITY_LOW) + + self._admin_logs_in() + self._admin_visits_admin_interface() + self._admin_adds_a_user() + self._admin_visits_edit_user() + self._admin_visits_reset_2fa_totp() + + def test_admin_deletes_user(self): + self._admin_logs_in() + self._admin_visits_admin_interface() + self._admin_adds_a_user() + self._admin_deletes_user() + def test_admin_updates_image(self): self._admin_logs_in() self._admin_visits_admin_interface() @@ -26,3 +53,11 @@ def test_ossec_alert_button(self): self._admin_visits_admin_interface() self._admin_visits_system_config_page() self._admin_can_send_test_alert() + + def test_admin_adds_admin_user(self): + self._admin_logs_in() + self._admin_visits_admin_interface() + # Add an admin user + self._admin_adds_a_user(is_admin=True) + self._new_admin_user_can_log_in() + self._admin_can_edit_new_user() diff --git a/securedrop/tests/functional/test_journalist.py b/securedrop/tests/functional/test_journalist.py --- a/securedrop/tests/functional/test_journalist.py +++ b/securedrop/tests/functional/test_journalist.py @@ -15,17 +15,30 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # -import source_navigation_steps -import journalist_navigation_steps -import functional_test + +from . import functional_test as ft +from . import journalist_navigation_steps +from . 
import source_navigation_steps class TestJournalist( - functional_test.FunctionalTest, - source_navigation_steps.SourceNavigationStepsMixin, - journalist_navigation_steps.JournalistNavigationStepsMixin): + ft.FunctionalTest, + source_navigation_steps.SourceNavigationStepsMixin, + journalist_navigation_steps.JournalistNavigationStepsMixin, +): + def test_journalist_verifies_deletion_of_one_submission_modal(self): + # This deletion button is displayed on the individual source page + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_submits_a_file() + self._source_logs_out() + self._journalist_logs_in() + self._journalist_visits_col() + self._journalist_uses_delete_selected_button_confirmation() - def test_journalist_verifies_deletion_of_one_submission_javascript(self): + def test_journalist_uses_col_delete_collection_button_modal(self): + # This delete button is displayed on the individual source page self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() @@ -33,4 +46,33 @@ def test_journalist_verifies_deletion_of_one_submission_javascript(self): self._source_logs_out() self._journalist_logs_in() self._journalist_visits_col() - self._journalist_verifies_deletion_of_one_submission_javascript() + self._journalist_uses_delete_collection_button_confirmation() + + def test_journalist_uses_index_delete_collections_button_modal(self): + # This deletion button is displayed on the index page + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_submits_a_file() + self._source_logs_out() + self._journalist_logs_in() + self._journalist_uses_delete_collections_button_confirmation() + + def test_journalist_interface_ui_with_modal(self): + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_submits_a_file() + self._source_logs_out() + + # Toggle security slider to force prefs change + self.set_tbb_securitylevel(ft.TBB_SECURITY_HIGH) + self.set_tbb_securitylevel(ft.TBB_SECURITY_LOW) + + self._journalist_logs_in() + self._journalist_uses_js_filter_by_sources() + self._journalist_source_selection_honors_filter() + self._journalist_selects_all_sources_then_selects_none() + self._journalist_selects_the_first_source() + self._journalist_uses_js_buttons_to_download_unread() + self._journalist_delete_all_confirmation() diff --git a/securedrop/tests/functional/make_account_changes.py b/securedrop/tests/functional/test_make_account_changes.py similarity index 66% rename from securedrop/tests/functional/make_account_changes.py rename to securedrop/tests/functional/test_make_account_changes.py --- a/securedrop/tests/functional/make_account_changes.py +++ b/securedrop/tests/functional/test_make_account_changes.py @@ -1,14 +1,12 @@ # -*- coding: utf-8 -*- -from unittest import TestCase +from . import journalist_navigation_steps +from . 
import functional_test -from functional_test import FunctionalTest -from journalist_navigation_steps import JournalistNavigationStepsMixin -from step_helpers import screenshots +class TestMakeAccountChanges( + functional_test.FunctionalTest, + journalist_navigation_steps.JournalistNavigationStepsMixin): -class MakeAccountChanges(FunctionalTest, JournalistNavigationStepsMixin, - TestCase): - @screenshots def test_admin_edit_account_html_template_rendering(self): """The edit_account.html template is used both when an admin is editing a user's account, and when a user is editing their own account. While @@ -18,11 +16,11 @@ def test_admin_edit_account_html_template_rendering(self): self._admin_logs_in() self._admin_visits_admin_interface() # Admin view of admin user - self._edit_user('admin') + self._edit_user(self.admin, True) self._admin_visits_admin_interface() self._admin_adds_a_user() # Admin view of non-admin user - self._edit_user('dellsberg') + self._edit_user(self.new_user['username']) # User view of self self._edit_account() self._logout() diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -1,5 +1,5 @@ -import source_navigation_steps -import functional_test +from . import source_navigation_steps, journalist_navigation_steps +from . import functional_test class TestSourceInterface( @@ -17,3 +17,15 @@ def test_lookup_codename_hint(self): self._source_chooses_to_login() self._source_proceeds_to_login() self._source_sees_no_codename() + + +class TestDownloadKey( + functional_test.FunctionalTest, + journalist_navigation_steps.JournalistNavigationStepsMixin): + + def test_journalist_key_from_source_interface(self): + data = self.return_downloaded_content(self.source_location + + "/journalist-key", None) + + data = data.decode('utf-8') + assert "BEGIN PGP PUBLIC KEY BLOCK" in data diff --git a/securedrop/tests/functional/test_source_metadata.py b/securedrop/tests/functional/test_source_metadata.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/functional/test_source_metadata.py @@ -0,0 +1,10 @@ +from . import source_navigation_steps +from . import functional_test + + +class TestInstanceMetadata( + functional_test.FunctionalTest, + source_navigation_steps.SourceNavigationStepsMixin): + + def test_instance_metadata(self): + self._source_checks_instance_metadata() diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py --- a/securedrop/tests/functional/test_source_notfound.py +++ b/securedrop/tests/functional/test_source_notfound.py @@ -1,5 +1,5 @@ -import source_navigation_steps -import functional_test +from . import source_navigation_steps +from . import functional_test class TestSourceInterfaceNotFound( diff --git a/securedrop/tests/functional/test_source_session_timeout.py b/securedrop/tests/functional/test_source_session_timeout.py --- a/securedrop/tests/functional/test_source_session_timeout.py +++ b/securedrop/tests/functional/test_source_session_timeout.py @@ -1,25 +1,17 @@ -import source_navigation_steps -import functional_test +from . import source_navigation_steps +from . import functional_test class TestSourceSessions( functional_test.FunctionalTest, source_navigation_steps.SourceNavigationStepsMixin): - def setup(self): - # The session expiration here cannot be set to -1 - # as it will trigger an exception in /create. 
- # Instead, we pick a 1-2s value to allow the account - # to be generated. - self.session_length_minutes = 0.03 - super(TestSourceSessions, self).setup( - session_expiration=self.session_length_minutes) + session_expiration = 5 def test_source_session_timeout(self): self._source_visits_source_homepage() self._source_clicks_submit_documents_on_homepage() self._source_continues_to_submit_page() - self._source_waits_for_session_to_timeout( - self.session_length_minutes) - self._source_visits_source_homepage() + self._source_waits_for_session_to_timeout() + self.driver.refresh() self._source_sees_session_timeout_message() diff --git a/securedrop/tests/functional/test_source_warnings.py b/securedrop/tests/functional/test_source_warnings.py --- a/securedrop/tests/functional/test_source_warnings.py +++ b/securedrop/tests/functional/test_source_warnings.py @@ -1,16 +1,78 @@ -import source_navigation_steps -import functional_test +import os +import shutil +from selenium import webdriver + +from . import functional_test +from . import source_navigation_steps -class TestSourceInterfaceBannerWarnings( - functional_test.FunctionalTest, - source_navigation_steps.SourceNavigationStepsMixin): +class TestSourceInterfaceBannerWarnings( + functional_test.FunctionalTest, source_navigation_steps.SourceNavigationStepsMixin +): def test_warning_appears_if_tor_browser_not_in_use(self): - self.driver.get(self.source_location) + try: + self.switch_to_firefox_driver() + self.driver.get(self.source_location) + + warning_banner = self.driver.find_element_by_id("use-tor-browser") + + assert "It is recommended to use the Tor Browser" in warning_banner.text + + # User should be able to dismiss the warning + warning_dismiss_button = self.driver.find_element_by_id("use-tor-browser-close") + self.banner_is_dismissed(warning_banner, warning_dismiss_button) + finally: + self.switch_to_torbrowser_driver() + + def test_warning_appears_if_orbot_is_used(self): + orbotUserAgent = "Mozilla/5.0 (Android; Mobile;" " rv:52.0) Gecko/20100101 Firefox/52.0" + + self.f_profile_path2 = "/tmp/testprofile2" + if os.path.exists(self.f_profile_path2): + shutil.rmtree(self.f_profile_path2) + # Create new profile and driver with the orbot user agent for this test + os.mkdir(self.f_profile_path2) + profile = webdriver.FirefoxProfile(self.f_profile_path2) + profile.set_preference("general.useragent.override", orbotUserAgent) + if self.journalist_location.find(".onion") != -1: + # set FF preference to socks proxy in Tor Browser + profile.set_preference("network.proxy.type", 1) + profile.set_preference("network.proxy.socks", "127.0.0.1") + profile.set_preference("network.proxy.socks_port", 9150) + profile.set_preference("network.proxy.socks_version", 5) + profile.set_preference("network.proxy.socks_remote_dns", True) + profile.set_preference("network.dns.blockDotOnion", False) + profile.update_preferences() + self.driver2 = webdriver.Firefox( + firefox_binary=functional_test.FIREFOX_PATH, firefox_profile=profile + ) + self.driver2.get(self.source_location) + + currentAgent = self.driver2.execute_script("return navigator.userAgent") + assert currentAgent == orbotUserAgent - warning_banner = self.driver.find_element_by_class_name( - 'use-tor-browser') + warning_banner = self.driver2.find_element_by_id("orfox-browser") + + assert "It is recommended you use the desktop version of Tor Browser" in warning_banner.text + + # User should be able to dismiss the warning + warning_dismiss_button = self.driver2.find_element_by_id("orfox-browser-close") + 
self.banner_is_dismissed(warning_banner, warning_dismiss_button) + + self.driver2.quit() + + def banner_is_dismissed(self, warning_banner, dismiss_button): + + dismiss_button.click() + + def warning_banner_is_hidden(): + assert warning_banner.is_displayed() is False + + self.wait_for(warning_banner_is_hidden) + + def test_warning_high_security(self): + self.driver.get(self.source_location) - assert ("We recommend using Tor Browser to access SecureDrop" in - warning_banner.text) + banner = self.driver.find_element_by_id("js-warning") + assert "Security Slider to Safest" in banner.text diff --git a/securedrop/tests/functional/test_submission_not_in_memory.py b/securedrop/tests/functional/test_submission_not_in_memory.py deleted file mode 100644 --- a/securedrop/tests/functional/test_submission_not_in_memory.py +++ /dev/null @@ -1,66 +0,0 @@ -from functional_test import FunctionalTest -import subprocess -from source_navigation_steps import SourceNavigationStepsMixin -import os -import pytest -import getpass -import re - - -class TestSubmissionNotInMemory(FunctionalTest, - SourceNavigationStepsMixin): - - def setup(self): - self.devnull = open('/dev/null', 'r') - FunctionalTest.setup(self) - - def teardown(self): - FunctionalTest.teardown(self) - - def _memory_dump(self, pid): - core_dump_base_name = '/tmp/core_dump' - core_dump_file_name = core_dump_base_name + '.' + pid - try: - subprocess.call(["sudo", "gcore", "-o", - core_dump_base_name, pid], stdout=self.devnull, - stderr=self.devnull) - subprocess.call(["sudo", "chown", getpass.getuser(), - core_dump_file_name]) - with open(core_dump_file_name, 'r') as fp: - return fp.read() - finally: - pass - os.remove(core_dump_file_name) - - def _num_strings_in(self, needle, haystack): - return sum(1 for _ in re.finditer(re.escape(needle), haystack)) - - @pytest.mark.xfail() - def test_message_is_not_retained_in_memory(self): - self._source_visits_source_homepage() - self._source_chooses_to_submit_documents() - self._source_continues_to_submit_page() - self._source_submits_a_message() - - source_server_pid = str(self.source_process.pid) - - memory_dump = self._memory_dump(source_server_pid) - secrets_in_memory = self._num_strings_in(self.secret_message, - memory_dump) - - assert secrets_in_memory < 1 - - @pytest.mark.xfail() - def test_file_upload_is_not_retained_in_memory(self): - self._source_visits_source_homepage() - self._source_chooses_to_submit_documents() - self._source_continues_to_submit_page() - self._source_submits_a_file() - - source_server_pid = str(self.source_process.pid) - - memory_dump = self._memory_dump(source_server_pid) - secrets_in_memory = self._num_strings_in(self.secret_message, - memory_dump) - - assert secrets_in_memory < 1 diff --git a/securedrop/tests/functional/test_submit_and_retrieve_file.py b/securedrop/tests/functional/test_submit_and_retrieve_file.py --- a/securedrop/tests/functional/test_submit_and_retrieve_file.py +++ b/securedrop/tests/functional/test_submit_and_retrieve_file.py @@ -1,7 +1,6 @@ -import source_navigation_steps -import journalist_navigation_steps -import functional_test -from step_helpers import screenshots +from . import source_navigation_steps +from . import journalist_navigation_steps +from . 
import functional_test class TestSubmitAndRetrieveFile( @@ -9,31 +8,31 @@ class TestSubmitAndRetrieveFile( source_navigation_steps.SourceNavigationStepsMixin, journalist_navigation_steps.JournalistNavigationStepsMixin): - @screenshots def test_submit_and_retrieve_happy_path(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() self._source_submits_a_file() self._source_logs_out() - self._journalist_logs_in() - self._journalist_checks_messages() - self._journalist_stars_and_unstars_single_message() - self._journalist_selects_all_sources_then_selects_none() - self._journalist_downloads_message() - self._journalist_sends_reply_to_source() + try: + self.switch_to_firefox_driver() + self._journalist_logs_in() + self._journalist_checks_messages() + self._journalist_stars_and_unstars_single_message() + self._journalist_downloads_message() + self._journalist_sends_reply_to_source() + finally: + self.switch_to_torbrowser_driver() self._source_visits_source_homepage() self._source_chooses_to_login() self._source_proceeds_to_login() self._source_deletes_a_journalist_reply() - @screenshots def test_source_cancels_at_login_page(self): self._source_visits_source_homepage() self._source_chooses_to_login() self._source_hits_cancel_at_login_page() - @screenshots def test_source_cancels_at_submit_page(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() diff --git a/securedrop/tests/functional/test_submit_and_retrieve_message.py b/securedrop/tests/functional/test_submit_and_retrieve_message.py --- a/securedrop/tests/functional/test_submit_and_retrieve_message.py +++ b/securedrop/tests/functional/test_submit_and_retrieve_message.py @@ -1,7 +1,6 @@ -import functional_test -import source_navigation_steps -import journalist_navigation_steps -from step_helpers import screenshots +from . import functional_test +from . import source_navigation_steps +from . import journalist_navigation_steps class TestSubmitAndRetrieveMessage( @@ -9,13 +8,14 @@ class TestSubmitAndRetrieveMessage( source_navigation_steps.SourceNavigationStepsMixin, journalist_navigation_steps.JournalistNavigationStepsMixin): - @screenshots def test_submit_and_retrieve_happy_path(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() self._source_submits_a_message() self._source_logs_out() + self.switch_to_firefox_driver() self._journalist_logs_in() self._journalist_checks_messages() self._journalist_downloads_message() + self.switch_to_torbrowser_driver() diff --git a/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/de_DE.po b/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/de_DE.po new file mode 100644 --- /dev/null +++ b/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/de_DE.po @@ -0,0 +1,27 @@ +# German translations for PACKAGE package. +# Copyright (C) 2017 Freedom of the Press Foundation +# This file is distributed under the same license as the PACKAGE package. +# Automatically generated, 2017. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: [email protected]\n" +"PO-Revision-Date: 2017-12-01 13:53+0000\n" +"Last-Translator: kwadronaut <[email protected]>\n" +"Language-Team: German " +"<https://weblate.securedrop.org/projects/securedrop/desktop/de/>\n" +"Language: de_DE\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" +"X-Generator: Weblate 2.17.1\n" + +#: desktop-journalist-icon.j2.in:9 +msgid "SecureDrop Journalist Interface" +msgstr "Journalistenschnittstelle für SecureDrop" + +#: desktop-source-icon.j2.in:9 +msgid "SecureDrop Source Interface" +msgstr "Quellenschnittstelle für SecureDrop" diff --git a/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/nl.po b/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/nl.po new file mode 100644 --- /dev/null +++ b/securedrop/tests/i18n/install_files/ansible-base/roles/tails-config/templates/nl.po @@ -0,0 +1,27 @@ +# Dutch translations for PACKAGE package. +# Copyright (C) 2017 Freedom of the Press Foundation +# This file is distributed under the same license as the PACKAGE package. +# Automatically generated, 2017. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: [email protected]\n" +"PO-Revision-Date: 2017-12-01 13:48+0000\n" +"Last-Translator: kwadronaut <[email protected]>\n" +"Language-Team: Dutch " +"<https://weblate.securedrop.org/projects/securedrop/desktop/nl/>\n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" +"X-Generator: Weblate 2.17.1\n" + +#: desktop-journalist-icon.j2.in:9 +msgid "SecureDrop Journalist Interface" +msgstr "SecureDrop - interface voor journalisten" + +#: desktop-source-icon.j2.in:9 +msgid "SecureDrop Source Interface" +msgstr "Securedrop - interface voor bronnen" diff --git a/securedrop/tests/i18n/securedrop/translations/de_DE/LC_MESSAGES/messages.po b/securedrop/tests/i18n/securedrop/translations/de_DE/LC_MESSAGES/messages.po new file mode 100644 --- /dev/null +++ b/securedrop/tests/i18n/securedrop/translations/de_DE/LC_MESSAGES/messages.po @@ -0,0 +1,1399 @@ +msgid "" +msgstr "" +"Project-Id-Version: SecureDrop \\'0.3.5\\'\n" +"Report-Msgid-Bugs-To: [email protected]\n" +"POT-Creation-Date: 2017-09-02 07:28+0000\n" +"PO-Revision-Date: 2018-01-10 08:03+0000\n" +"Last-Translator: kwadronaut <[email protected]>\n" +"Language-Team: German " +"<https://weblate.securedrop.org/projects/securedrop/securedrop/de/>\n" +"Language: de_DE\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" +"X-Generator: Weblate 2.18\n" +"Generated-By: Babel 2.4.0\n" + +#: template_filters.py:14 +msgid "{time} ago" +msgstr "vor {time}" + +#: journalist_app/__init__.py:34 journalist_app/__init__.py:76 +msgid "You have been logged out due to inactivity" +msgstr "Sie wurden wegen Inaktivität abgemeldet" + +#: journalist_app/account.py:26 +msgid "Incorrect password or two-factor code." +msgstr "Falsches Passwort oder Zwei-Faktor-Authentifizierungstoken." + +#: journalist_app/account.py:42 +msgid "Token in two-factor authentication verified." +msgstr "Token in Zwei-Faktor-Authentifizierung verifiziert." 
+ +#: journalist_app/account.py:46 journalist_app/admin.py:108 +msgid "Could not verify token in two-factor authentication." +msgstr "Token in Zwei-Faktor-Authentifizierung konnte nicht verifiziert werden." + +#: journalist_app/admin.py:37 +msgid "Image updated." +msgstr "Bild aktualisiert." + +#: journalist_app/admin.py:63 +msgid "" +"There was an error with the autogenerated password. User not created. Please " +"try again." +msgstr "" +"Es gab einen Fehler mit dem automatisch generierten Passwort. Der Benutzer " +"wurde nicht erstellt. Bitte versuchen Sie es erneut." + +#: journalist_app/admin.py:74 +msgid "That username is already in use" +msgstr "Dieser Benutzername ist bereits vergeben" + +#: journalist_app/admin.py:77 +msgid "" +"An error occurred saving this user to the database. Please inform your " +"administrator." +msgstr "" +"Beim Speichern dieses Benutzers in der Datenbank ist ein Fehler aufgetreten. " +"Bitte informieren Sie Ihren Administrator." + +#: journalist_app/admin.py:101 +msgid "Token in two-factor authentication accepted for user {user}." +msgstr "" +"Zwei-Faktor-Authentifizierungstoken für Benutzer {user} wurde akzeptiert." + +#: journalist_app/admin.py:135 +msgid "Invalid secret format: please only submit letters A-F and numbers 0-9." +msgstr "" +"Ungültiges Passwort-Format: Bitte geben Sie nur die Buchstaben A-F und " +"Ziffern 0-9 ein." + +#: journalist_app/admin.py:140 +msgid "Invalid secret format: odd-length secret. Did you mistype the secret?" +msgstr "" +"Ungültiges Geheimnisformat: ungerade Geheimnislänge. Haben Sie das Geheimnis " +"falsch eingetippt?" + +#: journalist_app/admin.py:145 journalist_app/main.py:107 +#: journalist_app/utils.py:38 +msgid "An unexpected error occurred! Please inform your administrator." +msgstr "" +"Ein unerwarteter Fehler ist aufgetreten! Bitte informieren Sie Ihren " +"Administrator." + +#: journalist_app/admin.py:178 +msgid "Username \"{user}\" already taken." +msgstr "Benutzername \"{user}\" ist bereits vergeben." + +#: journalist_app/admin.py:214 +msgid "Deleted user '{user}'" +msgstr "Benutzer '{user}' wurde gelöscht" + +#: journalist_app/col.py:47 +msgid "{source_name}'s collection deleted" +msgstr "{source_name}s Sammlung wurde gelöscht" + +#: journalist_app/col.py:58 +msgid "No collections selected." +msgstr "Keine Sammlungen ausgewählt." + +#: journalist_app/decorators.py:16 +msgid "Only administrators can access this page." +msgstr "Nur Administratoren haben Zugriff auf diese Seite." + +#: journalist_app/forms.py:19 +msgid "Field must be 40 characters long but got {num_chars}." +msgstr "Das Feld muss 40 Zeichen lang sein, hat aber {num_chars}." + +#: journalist_app/forms.py:30 +msgid "" +"Field must be at least {min_chars} characters long but only got {num_chars}." +msgstr "" +"Das Feld muss mindestens {min_chars} Zeichen lang sein, enthält aber nur " +"{num_chars}." + +#: journalist_app/forms.py:35 source_app/forms.py:11 +msgid "This field is required." +msgstr "Dieses Feld ist erforderlich." + +#: journalist_app/forms.py:52 +msgid "You cannot send an empty reply." +msgstr "Sie können keine leere Antwort senden." + +#: journalist_app/forms.py:60 +msgid "File required." +msgstr "Datei erforderlich." + +#: journalist_app/forms.py:62 +msgid "Upload images only." +msgstr "Laden Sie nur Bilder hoch." + +#: journalist_app/main.py:118 +msgid "Thanks. Your reply has been stored." +msgstr "Vielen Dank. Ihre Antwort wurde gespeichert." + +#: journalist_app/main.py:139 +msgid "No collections selected for download."
+msgstr "Es sind keine Sammlungen zum Herunterladen ausgewählt." + +#: journalist_app/main.py:142 journalist_app/utils.py:189 +msgid "No collections selected for deletion." +msgstr "Es sind keine Sammlungen zum Löschen ausgewählt." + +#: journalist_app/main.py:168 +msgid "The source '{original_name}' has been renamed to '{new_name}'" +msgstr "Die Quelle wurde von '{original_name}' zu '{new_name}' umbenannt" + +#: journalist_app/main.py:183 +msgid "No unread submissions for this source." +msgstr "Keine ungelesenen Einreichungen für diese Quelle." + +#: journalist_app/utils.py:45 +msgid "Account updated." +msgstr "Konto aktualisiert." + +#: journalist_app/utils.py:76 +msgid "Login failed." +msgstr "Anmeldung fehlgeschlagen." + +#: journalist_app/utils.py:84 +msgid "Please wait at least {seconds} second before logging in again." +msgid_plural "Please wait at least {seconds} seconds before logging in again." +msgstr[0] "" +"Bitte warten Sie mindestens {seconds} Sekunde, bevor Sie sich erneut " +"anmelden." +msgstr[1] "" +"Bitte warten Sie mindestens {seconds} Sekunden, bevor Sie sich erneut " +"anmelden." + +#: journalist_app/utils.py:95 +msgid "Please wait for a new two-factor token before trying again." +msgstr "" +"Bitte warten Sie auf ein neues Zwei-Faktor-Token, bevor Sie erneut versuchen." + +#: journalist_app/utils.py:141 +msgid "Submission deleted." +msgid_plural "{num} submissions deleted." +msgstr[0] "Einreichung gelöscht." +msgstr[1] "{num} Einreichungen gelöscht." + +#: journalist_app/utils.py:194 +msgid "{num} collection deleted" +msgid_plural "{num} collections deleted" +msgstr[0] "{num} Sammlung gelöscht" +msgstr[1] "{num} Sammlungen gelöscht" + +#: journalist_app/utils.py:229 +msgid "You submitted a bad password! Password not changed." +msgstr "Sie haben ein falsches Passwort eingegeben! Passwort nicht geändert." + +#: journalist_app/utils.py:236 +msgid "" +"There was an error, and the new password might not have been saved " +"correctly. To prevent you from getting locked out of your account, you " +"should reset your password again." +msgstr "" +"Es gab einen Fehler und das neue Passwort wurde möglicherweise nicht korrekt " +"gespeichert. Um zu verhindern, dass Sie aus Ihrem Konto ausgeschlossen " +"werden, sollten Sie Ihr Passwort erneut zurücksetzen." + +#: journalist_app/utils.py:245 +msgid "" +"Password updated. Don't forget to save it in your KeePassX database. New " +"password:" +msgstr "" +"Das Passwort wurde erfolgreich aktualisiert. Vergessen Sie nicht, es in " +"Ihrer KeePassX-Datenbank zu speichern. Das neue Passwort ist:" + +#: journalist_app/utils.py:262 +msgid "No unread submissions in selected collections." +msgstr "Keine ungelesenen Einreichungen in den ausgewählten Sammlungen." 
+ +#: journalist_templates/_source_row.html:19 +msgid "1 doc" +msgid_plural "{doc_num} docs" +msgstr[0] "1 Dok" +msgstr[1] "{doc_num} Dok" + +#: journalist_templates/_source_row.html:20 +msgid "1 message" +msgid_plural "{msg_num} messages" +msgstr[0] "1 Nachricht" +msgstr[1] "{msg_num} Nachrichten" + +#: journalist_templates/_source_row.html:23 +msgid "{num_unread} unread" +msgstr "{num_unread} ungelesen" + +#: journalist_templates/account_edit_hotp_secret.html:6 +#: journalist_templates/admin_edit_hotp_secret.html:7 +msgid "Change Secret" +msgstr "Geheimnis ändern" + +#: journalist_templates/account_edit_hotp_secret.html:7 +#: journalist_templates/admin_add_user.html:33 +#: journalist_templates/admin_edit_hotp_secret.html:8 +msgid "HOTP Secret" +msgstr "HOTP-Geheimnis" + +#: journalist_templates/account_edit_hotp_secret.html:9 +#: journalist_templates/admin_edit_hotp_secret.html:10 +#: source_templates/login.html:23 +msgid "CONTINUE" +msgstr "FORTSETZEN" + +#: journalist_templates/account_new_two_factor.html:4 +#: journalist_templates/admin_new_user_two_factor.html:5 +msgid "Enable FreeOTP" +msgstr "FreeOTP aktivieren" + +#: journalist_templates/account_new_two_factor.html:5 +msgid "" +"You're almost done! To finish resetting your two-factor authentication, " +"follow the instructions below to set up FreeOTP. Once you've added the entry " +"for your account in the app, enter one of the 6-digit codes from the app to " +"confirm that two factor authentication is set up correctly." +msgstr "" +"Sie sind fast fertig! Um das Zurücksetzen der Zwei-Faktor-Authentifizierung " +"abzuschließen, folgen Sie den Anweisungen unten, um FreeOTP einzurichten. " +"Nachdem Sie den Eintrag für Ihr Konto in der App hinzugefügt haben, geben " +"Sie einen der 6-stelligen Codes aus der App ein, um zu bestätigen, dass die " +"Zwei-Faktor-Authentifizierung korrekt eingerichtet ist." + +#: journalist_templates/account_new_two_factor.html:8 +#: journalist_templates/admin_new_user_two_factor.html:9 +msgid "Install FreeOTP on your phone" +msgstr "Installieren Sie FreeOTP auf Ihrem Handy" + +#: journalist_templates/account_new_two_factor.html:9 +#: journalist_templates/admin_new_user_two_factor.html:10 +msgid "Open the FreeOTP app" +msgstr "Öffnen Sie die FreeOTP-App" + +#: journalist_templates/account_new_two_factor.html:10 +#: journalist_templates/admin_new_user_two_factor.html:11 +msgid "Tap the QRcode symbol at the top" +msgstr "Tippen Sie auf das QRcode-Symbol am oberen Rand" + +#: journalist_templates/account_new_two_factor.html:11 +#: journalist_templates/admin_new_user_two_factor.html:12 +msgid "" +"Your phone will now be in \"scanning\" mode. When you are in this mode, scan " +"the barcode below:" +msgstr "" +"Ihr Handy geht nun in den \"Scan-Modus\". Scannen Sie in diesem Modus den " +"Barcode unten ein:" + +#: journalist_templates/account_new_two_factor.html:14 +#: journalist_templates/admin_new_user_two_factor.html:15 +msgid "Can't scan the barcode? Enter the following code manually:" +msgstr "" +"Können Sie den Barcode nicht scannen? 
Geben Sie manuell den folgenden Code " +"ein:" + +#: journalist_templates/account_new_two_factor.html:15 +#: journalist_templates/admin_new_user_two_factor.html:18 +msgid "Once you have scanned the barcode, enter the 6-digit code below:" +msgstr "" +"Geben Sie unten den folgenden 6-stelligen Code ein, sobald Sie den Barcode " +"gescannt haben:" + +#: journalist_templates/account_new_two_factor.html:17 +#: journalist_templates/admin_new_user_two_factor.html:20 +msgid "Enable YubiKey (OATH-HOTP)" +msgstr "YubiKey aktivieren (OATH-HOTP)" + +#: journalist_templates/account_new_two_factor.html:18 +#: journalist_templates/admin_new_user_two_factor.html:21 +msgid "Once you have configured your YubiKey, enter the 6-digit code below:" +msgstr "" +"Geben sie den folgenden 6-stelligen Code unten ein, sobald Sie Ihren YubiKey " +"konfiguriert haben:" + +#: journalist_templates/account_new_two_factor.html:22 +#: journalist_templates/admin_new_user_two_factor.html:25 +msgid "Verification code" +msgstr "Bestätigungscode" + +#: journalist_templates/account_new_two_factor.html:24 +#: journalist_templates/admin_new_user_two_factor.html:27 +#: journalist_templates/col.html:83 source_templates/lookup.html:45 +msgid "SUBMIT" +msgstr "EINREICHEN" + +#: journalist_templates/admin.html:3 +msgid "Admin Interface" +msgstr "Adminoberfläche" + +#: journalist_templates/admin.html:6 +#: journalist_templates/admin_add_user.html:39 +msgid "ADD USER" +msgstr "BENUTZER HINZUFÜGEN" + +#: journalist_templates/admin.html:16 +#: journalist_templates/admin_add_user.html:14 +#: journalist_templates/login.html:8 +msgid "Username" +msgstr "Benutzername" + +#: journalist_templates/admin.html:17 +msgid "Edit" +msgstr "Bearbeiten" + +#: journalist_templates/admin.html:18 journalist_templates/index.html:15 +msgid "Delete" +msgstr "Löschen" + +#: journalist_templates/admin.html:19 +msgid "Created" +msgstr "Erstellt" + +#: journalist_templates/admin.html:20 +msgid "Last login" +msgstr "Letzte Anmeldung" + +#: journalist_templates/admin.html:25 +msgid "Edit user {username}" +msgstr "Benutzer {username} bearbeiten" + +#: journalist_templates/admin.html:26 +msgid "Delete user {username}" +msgstr "Benutzer {username} löschen" + +#: journalist_templates/admin.html:31 +msgid "never" +msgstr "niemals" + +#: journalist_templates/admin.html:38 +msgid "No users to display" +msgstr "Keine Benutzer anzuzeigen" + +#: journalist_templates/admin.html:44 +msgid "INSTANCE CONFIG" +msgstr "INSTANZKONFIGURATION" + +#: journalist_templates/admin_add_user.html:4 +#: journalist_templates/config.html:4 journalist_templates/edit_account.html:7 +msgid "Back to admin interface" +msgstr "Zurück zur Adminoberfläche" + +#: journalist_templates/admin_add_user.html:21 +msgid "The user's password will be:" +msgstr "Das Benutzerpasswort lautet:" + +#: journalist_templates/admin_add_user.html:24 +#: journalist_templates/edit_account.html:17 +msgid "Is Administrator" +msgstr "Ist Administrator" + +#: journalist_templates/admin_add_user.html:32 +msgid "Is using a YubiKey [HOTP]" +msgstr "Benutzt einen YubiKey [HOTP]" + +#: journalist_templates/admin_new_user_two_factor.html:6 +msgid "" +"You're almost done! To finish adding this new user, have them follow the " +"instructions below to set up two-factor authentication with FreeOTP. Once " +"they've added an entry for this account in the app, have them enter one of " +"the 6-digit codes from the app to confirm that two factor authentication is " +"set up correctly." +msgstr "" +"Sie sind fast fertig! 
Um das Hinzufügen dieses neuen Nutzers abzuschließen, " +"muss dieser den untenstehenden Anweisungen folgen, um die Zwei-Faktor-" +"Authentifizierung mit FreeOTP einzurichten. Sobald der Nutzer in der App " +"einen Eintrag für dieses Konto hinzugefügt hat, muss er einen der 6-" +"stelligen Codes aus der App eingeben, um zu bestätigen, dass die Zwei-Faktor-" +"Authentifizierung erfolgreich eingerichtet ist." + +#: journalist_templates/base.html:24 +msgid "Logged on as" +msgstr "Angemeldet als" + +#: journalist_templates/base.html:26 +msgid "Admin" +msgstr "Admin" + +#: journalist_templates/base.html:28 +msgid "Log Out" +msgstr "Abmelden" + +#: journalist_templates/base.html:40 +msgid "" +"Powered by <br> <img src=\"/static/i/securedrop_small.png\" alt=\"SecureDrop" +"\">" +msgstr "" +"Ermöglicht von<br> <img src=\"/static/i/securedrop_small.png\" alt=" +"\"SecureDrop\">" + +#: journalist_templates/base.html:54 +msgid "Powered by <em>SecureDrop {version}</em>." +msgstr "Ermöglicht von <em>SecureDrop {version}</em>." + +#: journalist_templates/col.html:10 +msgid "All Sources" +msgstr "Alle Quellen" + +#: journalist_templates/col.html:13 +msgid "" +"Generate a new random codename for this source. We recommend doing this if " +"the first random codename is difficult to say or remember. You can generate " +"new random codenames as many times as you like." +msgstr "" +"Einen neuen zufälligen Deckname für diese Quelle generieren. Wir empfehlen " +"das, falls der erste Deckname schwer auszusprechen oder zu merken ist. Sie " +"können beliebig viele neue zufällige Decknamen erzeugen und so oft Sie " +"wollen." + +#: journalist_templates/col.html:13 +msgid "Change codename" +msgstr "Deckname ändern" + +#: journalist_templates/col.html:14 +msgid "Are you sure you want to generate a new codename?" +msgstr "Sind Sie sicher, dass Sie einen neuen Decknamen generieren möchten?" + +#: journalist_templates/col.html:15 source_templates/lookup.html:72 +msgid "Cancel" +msgstr "Abbrechen" + +#: journalist_templates/col.html:16 +msgid "CONFIRM" +msgstr "BESTÄTIGEN" + +#: journalist_templates/col.html:22 +msgid "" +"The documents are stored encrypted for security. To read them, you will need " +"to decrypt them using GPG." +msgstr "" +"Die Dokumente werden aus Sicherheitsgründen verschlüsselt gespeichert. Damit " +"Sie sie lesen können, müssen Sie sie mit GPG entschlüsseln." + +#: journalist_templates/col.html:26 +msgid "Download Selected" +msgstr "Herunterladen ausgewählt" + +#: journalist_templates/col.html:27 +msgid "Delete Selected" +msgstr "Löschen ausgewählt" + +#: journalist_templates/col.html:55 +msgid "Uploaded Document" +msgstr "Hochgeladenes Dokument" + +#: journalist_templates/col.html:57 journalist_templates/col.html:75 +msgid "Reply" +msgstr "Antworten" + +#: journalist_templates/col.html:59 +msgid "Message" +msgstr "Nachricht" + +#: journalist_templates/col.html:70 +msgid "No documents to display." +msgstr "Keine Dokumente anzuzeigen." + +#: journalist_templates/col.html:77 +msgid "" +"You can write a secure reply to the person who submitted these documents:" +msgstr "" +"Sie können der Person, die diese Dokumente eingereicht hat, eine sichere " +"Antwort schreiben:" + +#: journalist_templates/col.html:86 +msgid "You've flagged this source for reply." +msgstr "Sie haben diese Quelle für eine Antwort markiert." + +#: journalist_templates/col.html:87 +msgid "" +"An encryption key will be generated for the source the next time they log " +"in, after which you will be able to reply to the source here." 
+msgstr "" +"Beim nächsten Anmelden wird für die Quelle ein Verschlüsselungsschlüssel " +"generiert, nach dem Sie auf die Quelle antworten können." + +#: journalist_templates/col.html:89 +msgid "Click below if you would like to write a reply to this source." +msgstr "" +"Klicken Sie hier, wenn Sie dieser Quelle eine Antwort schreiben möchten." + +#: journalist_templates/col.html:93 +msgid "FLAG THIS SOURCE FOR REPLY" +msgstr "DIESE QUELLE FÜR EINE ANTWORT MERKEN" + +#: journalist_templates/col.html:98 +msgid "" +"Click below to delete this source's collection. <em>Warning: If you do this, " +"the files seen here will be unrecoverable and the source will no longer be " +"able to login using their previous codename.</em>" +msgstr "" +"Klicken Sie hier, um die Sammlung dieser Quelle zu löschen. <em>Warnung: Die " +"Dateien können anschließend nicht wiederhergestellt werden und die Quelle " +"kann sich nicht mehr mit ihrem früheren Decknamen anmelden.</em>" + +#: journalist_templates/col.html:104 +msgid "DELETE COLLECTION" +msgstr "SAMMLUNG LÖSCHEN" + +#: journalist_templates/config.html:7 +msgid "Instance Configuration" +msgstr "Instanzkonfiguration" + +#: journalist_templates/config.html:9 +msgid "Alerts" +msgstr "Alarme" + +#: journalist_templates/config.html:11 +msgid "Send a test encrypted email alert to verify OSSEC is working correctly:" +msgstr "" +"Senden Sie eine testweise verschlüsselte E-Mail-Benachrichtigung, um zu " +"überprüfen, ob OSSEC korrekt funktioniert:" + +#: journalist_templates/config.html:15 +msgid "SEND TEST OSSEC ALERT" +msgstr "TESTWEISEN OSSEC-ALARM SENDEN" + +#: journalist_templates/config.html:21 +msgid "Logo Image" +msgstr "Logobild" + +#: journalist_templates/config.html:23 +msgid "" +"Here you can update the image displayed on the SecureDrop web interfaces:" +msgstr "" +"Hier können Sie das Bild, das auf den SecureDrop-Weboberflächen angezeigt " +"wird, aktualisieren:" + +#: journalist_templates/config.html:39 +msgid "UPDATE LOGO" +msgstr "LOGO AKTUALISIEREN" + +#: journalist_templates/delete.html:5 +msgid "" +"The following file has been selected for <strong>permanent deletion</strong>:" +msgid_plural "" +"The following {files} files have been selected for <strong>permanent " +"deletion</strong>:" +msgstr[0] "" +"Die folgende Datei wurde zum <strong>endgültigen Löschen</strong> ausgewählt:" +msgstr[1] "" +"Die folgenden {files} Dateien wurden zum <strong>endgültigen Löschen</" +"strong> ausgewählt:" + +#: journalist_templates/delete.html:20 +msgid "PERMANENTLY DELETE FILES" +msgstr "DATEIEN PERMANENT LÖSCHEN" + +#: journalist_templates/delete.html:23 +msgid "Return to the list of documents for {source_name}…" +msgstr "Zurück zur Dokumentenliste für {source_name}…" + +#: journalist_templates/edit_account.html:6 +msgid "Edit user \"{user}\"" +msgstr "Benutzer \"{user}\" bearbeiten" + +#: journalist_templates/edit_account.html:8 +msgid "Change Username &amp; Admin Status" +msgstr "Benutzernamen &amp; Adminstatus ändern" + +#: journalist_templates/edit_account.html:12 +msgid "Change username" +msgstr "Benutzername ändern" + +#: journalist_templates/edit_account.html:19 +msgid "UPDATE" +msgstr "AKTUALISIEREN" + +#: journalist_templates/edit_account.html:22 +msgid "Edit your account" +msgstr "Ihr Konto bearbeiten" + +#: journalist_templates/edit_account.html:25 +msgid "Reset Password" +msgstr "Passwort zurücksetzen" + +#: journalist_templates/edit_account.html:27 +msgid "SecureDrop now uses automatically generated diceware passwords." 
+msgstr "SecureDrop benutzt jetzt automatisch generierte diceware-Passwörter." + +#: journalist_templates/edit_account.html:28 +msgid "" +"Your password will be changed immediately, so you will need to save it " +"before pressing the \"Reset Password\" button." +msgstr "" +"Ihr Passwort wird sofort geändert. Bevor Sie die Schaltfläche \"Passwort " +"zurücksetzen\" drücken, müssen Sie es speichern." + +#: journalist_templates/edit_account.html:34 +msgid "Please enter your current password and two-factor code." +msgstr "Bitte geben Sie Ihr aktuelles Passwort und Zwei-Faktor-Code ein." + +#: journalist_templates/edit_account.html:40 +msgid "Current Password" +msgstr "Aktuelles Passwort" + +#: journalist_templates/edit_account.html:41 journalist_templates/login.html:10 +msgid "Two-factor Code" +msgstr "Zwei-Faktor-Code" + +#: journalist_templates/edit_account.html:46 +msgid "The user's password will be changed to:" +msgstr "Das Benutzerpasswort wird geändert zu:" + +#: journalist_templates/edit_account.html:48 +msgid "Your password will be changed to:" +msgstr "Ihr Passwort wird geändert zu:" + +#: journalist_templates/edit_account.html:53 +msgid "RESET PASSWORD" +msgstr "PASSWORT ZURÜCKSETZEN" + +#: journalist_templates/edit_account.html:58 +msgid "Reset Two-Factor Authentication" +msgstr "Zwei-Faktor-Authentifizierung zurücksetzen" + +#: journalist_templates/edit_account.html:61 +msgid "" +"If a user's two-factor authentication credentials have been lost or " +"compromised, you can reset them here. <em>If you do this, make sure the user " +"is present and ready to set up their device with the new two-factor " +"credentials. Otherwise, they will be locked out of their account." +msgstr "" +"Wenn die Anmeldedaten eines Benutzers für die Zwei-Faktor-Authentifizierung " +"verloren oder kompromittiert wurden, können Sie sie hier zurücksetzen. " +"<em>Wenn Sie das tun, stellen Sie sicher, dass der Benutzer anwesend ist und " +"sein Gerät mit den neuen Zwei-Faktor-Anmeldedaten einrichten kann. Sonst " +"verliert der Benutzer den Zugriff auf sein Konto." + +#: journalist_templates/edit_account.html:63 +msgid "" +"If your two-factor authentication credentials have been lost or compromised, " +"or you got a new device, you can reset your credentials here. <em>If you do " +"this, make sure you are ready to set up your new device, otherwise you will " +"be locked out of your account.</em>" +msgstr "" +"Wenn Ihre Anmeldedaten für die Zwei-Faktor-Authentifizierung verloren oder " +"kompromittiert wurden oder Sie ein neues Gerät nutzen, können Sie Ihre " +"Anmeldedaten hier zurücksetzen. <em>Wenn Sie das tun, stellen Sie sicher, " +"dass Sie bereit sind, Ihr neues Gerät einzurichten, sonst verlieren Sie den " +"Zugriff auf Ihr Konto.</em>" + +#: journalist_templates/edit_account.html:65 +msgid "" +"To reset two-factor authentication for mobile apps such as FreeOTP, choose " +"the first option. For hardware tokens like the Yubikey, choose the second." +msgstr "" +"Wählen Sie die erste Option, um die Zwei-Faktor-Authentifizierung für Mobile-" +"Apps wie FreeOTP zurückzusetzen. Für Hardware-Token wie Yubikey wählen Sie " +"die zweite Option." 
+ +#: journalist_templates/edit_account.html:85 +msgid "RESET TWO-FACTOR AUTHENTICATION (APP)" +msgstr "ZWEI-FAKTOR-AUTHENTIFIZIERUNG ZURÜCKSETZEN (APP)" + +#: journalist_templates/edit_account.html:87 +msgid "RESET TWO-FACTOR AUTHENTICATION (HARDWARE TOKEN)" +msgstr "ZWEI-FAKTOR-AUTHENTIFIZIERUNG ZURÜCKSETZEN (HARDWARE-TOKEN)" + +#: journalist_templates/flag.html:5 +msgid "Thanks!" +msgstr "Danke!" + +#: journalist_templates/flag.html:8 +msgid "" +"SecureDrop will generate a secure encryption key for this source the next " +"time that they log in. Once the key has been generated, a reply box will " +"appear under their collection of documents. You can use this box to write " +"encrypted replies to them." +msgstr "" +"SecureDrop wird für diese Quelle einen sicheren Schlüssel für die " +"Verschlüsselung generieren, wenn sie sich das nächste Mal anmeldet. Nach der " +"Generierung des Schlüssels erscheint ein Antwortfeld unter der " +"Dokumentensammlung. Über dieses Feld können Sie der Quelle verschlüsselte " +"Antworten schreiben." + +#: journalist_templates/flag.html:10 +msgid "Continue to the list of documents for {codename}..." +msgstr "Weiter zur Dokumentenliste von {codename}..." + +#: journalist_templates/index.html:4 +msgid "Sources" +msgstr "Quellen" + +#: journalist_templates/index.html:11 +msgid "Download Unread" +msgstr "Ungelesene herunterladen" + +#: journalist_templates/index.html:12 +msgid "Download" +msgstr "Herunterladen" + +#: journalist_templates/index.html:13 +msgid "Star" +msgstr "Markieren" + +#: journalist_templates/index.html:14 +msgid "Un-star" +msgstr "Markieren rückgängig machen" + +#: journalist_templates/index.html:42 +msgid "No documents have been submitted!" +msgstr "Es wurden keine Dokumente eingereicht!" + +#: journalist_templates/js-strings.html:3 +msgid "filter by codename" +msgstr "Nach Deckname filtern" + +#: journalist_templates/js-strings.html:4 +msgid "Select All" +msgstr "Alles auswählen" + +#: journalist_templates/js-strings.html:5 +msgid "Select Unread" +msgstr "Ungelesene auswählen" + +#: journalist_templates/js-strings.html:6 +msgid "Select None" +msgstr "Keine auswählen" + +#: journalist_templates/js-strings.html:7 +msgid "Are you sure you want to delete this collection?" +msgstr "Sind Sie sicher, dass Sie diese Sammlung löschen möchten?" + +#: journalist_templates/js-strings.html:8 +msgid "Are you sure you want to delete the {size} selected collections?" +msgstr "" +"Sind Sie sicher, dass Sie die {size} markierten Sammlungen löschen möchten?" + +#: journalist_templates/js-strings.html:9 +msgid "Are you sure you want to delete the {size} selected submissions?" +msgstr "" +"Sind Sie sicher, dass Sie die {size} markierten Einreichungen löschen " +"möchten?" + +#: journalist_templates/js-strings.html:10 +msgid "Are you sure you want to delete the user {username}?" +msgstr "Sind Sie sicher, dass Sie den Benutzer {username} löschen möchten?" + +#: journalist_templates/js-strings.html:11 +msgid "" +"Are you sure you want to reset two-factor authentication for {username}?" +msgstr "" +"Sind Sie sicher, dass Sie die Zwei-Faktor-Authentifizierung für {username} " +"zurücksetzen möchten?"
+ +#: journalist_templates/login.html:4 +msgid "Login to access the journalist interface" +msgstr "" +"Melden Sie sich an, um auf die Journalistenoberfläche zugreifen zu können" + +#: journalist_templates/login.html:9 +msgid "Password" +msgstr "Passwort" + +#: journalist_templates/login.html:12 +msgid "LOG IN" +msgstr "ANMELDEN" + +#: source_app/__init__.py:73 +msgid "" +"<strong>WARNING:&nbsp;</strong> You appear to be using Tor2Web. This " +"<strong>&nbsp;does not&nbsp;</strong> provide anonymity. <a href=" +"\"{url}\">Why is this dangerous?</a>" +msgstr "" +"<strong>ACHTUNG:&nbsp;</strong> Es scheint, dass Sie Tor2Web benutzen. Das " +"garantiert <strong>&nbsp;keine Anonymität.&nbsp;</strong> <a href=\"{url}\">" +"Warum ist das gefährlich?</a>" + +#: source_app/forms.py:15 +msgid "Field must be between 1 and {max_codename_len} characters long." +msgstr "Feld muss zwischen 1 und {max_codename_len} Zeichen lang sein." + +#: source_app/forms.py:18 +msgid "Invalid input." +msgstr "Ungültige Eingabe." + +#: source_app/main.py:32 +msgid "" +"You were redirected because you are already logged in. If you want to create " +"a new account, you should log out first." +msgstr "" +"Sie wurden umgeleitet, weil Sie bereits angemeldet sind. Wenn Sie ein neues " +"Konto erstellen möchten, müssen Sie sich zuerst abmelden." + +#: source_app/main.py:111 +msgid "You must enter a message or choose a file to submit." +msgstr "" +"Sie müssen eine Nachricht eingeben oder eine Datei zum Versenden auswählen." + +#: source_app/main.py:144 +msgid "Thanks! We received your message." +msgstr "Danke! Wir haben Ihre Nachricht erhalten." + +#: source_app/main.py:146 +msgid "Thanks! We received your document." +msgstr "Danke! Wir haben Ihr Dokument erhalten." + +#: source_app/main.py:148 +msgid "Thanks! We received your message and document." +msgstr "Danke! Wir haben Ihre Nachricht und Dokument erhalten." + +#: source_app/main.py:190 +msgid "Reply deleted" +msgstr "Antwort gelöscht" + +#: source_app/main.py:207 +msgid "All replies have been deleted" +msgstr "Alle Antworten wurden gelöscht" + +#: source_app/main.py:221 +msgid "Sorry, that is not a recognized codename." +msgstr "Entschuldigung, das ist kein erkannter Deckname." + +#: source_templates/base.html:6 source_templates/index.html:4 +msgid "Protecting Journalists and Sources" +msgstr "Schützt Journalisten und Quellen" + +#: source_templates/base.html:24 source_templates/base.html:43 +#: source_templates/index.html:32 source_templates/index.html:84 +msgid "Powered by" +msgstr "Ermöglicht durch" + +#: source_templates/base.html:33 +msgid "LOG OUT" +msgstr "ABMELDEN" + +#: source_templates/base.html:43 source_templates/index.html:84 +msgid "" +"Like all software, SecureDrop may contain security bugs. Use at your own " +"risk." +msgstr "" +"Wie jede Software kann SecureDrop Sicherheitsfehler enthalten. Nutzung auf " +"eigene Gefahr." + +#: source_templates/error.html:3 +msgid "Server error" +msgstr "Serverfehler" + +#: source_templates/error.html:5 +msgid "" +"Sorry, the website encountered an error and was unable to complete your " +"request." +msgstr "" +"Entschuldigung, die Webseite ist auf einen Fehler gestoßen und konnte Ihre " +"Anfrage nicht bearbeiten." + +#: source_templates/error.html:7 source_templates/notfound.html:7 +msgid "Look up a codename..." +msgstr "Decknamen nachschlagen..." + +#: source_templates/first_submission_flashed_message.html:2 +msgid "Success!" +msgstr "Erfolg!" 
+ +#: source_templates/first_submission_flashed_message.html:3 +msgid "" +"Thank you for sending this information to us. Please check back later for " +"replies." +msgstr "" +"Danke, dass Sie uns diese Information gesendet haben. Bitte kommen Sie " +"später wieder, um nach einer Antwort zu schauen." + +#: source_templates/first_submission_flashed_message.html:5 +msgid "Forgot your codename?" +msgstr "Decknamen vergessen?" + +#: source_templates/generate.html:4 +msgid "Welcome" +msgstr "Willkommen" + +#: source_templates/generate.html:7 +msgid "" +"Please either write this codename down and keep it in a safe place, or " +"memorize it.<br> This will allow you to log back in and receive replies from " +"journalists." +msgstr "" +"Bitte notieren Sie sich diesen Decknamen und bewahren Sie ihn an einem " +"sicheren Ort auf oder merken Sie ihn sich.<br> Auf diese Weise können Sie " +"sich wieder anmelden und Antworten von Journalisten erhalten." + +#: source_templates/generate.html:10 +msgid "" +"This codename is what you will use in future visits to receive messages from " +"our journalists in response to what you submit on the next screen." +msgstr "" +"Diesen Decknamen benutzen Sie, um bei zukünftigen Besuchen Nachrichten von " +"unseren Journalisten zu erhalten, die darauf antworten, was Sie im nächsten " +"Schritt versenden." + +#: source_templates/generate.html:34 +msgid "" +"Because we use none of the traditional means to track users of our " +"<strong>SecureDrop</strong>\n" +" service, in future visits, using this codename will be the only way we have " +"to communicate with you should we have\n" +" questions or are interested in additional documents. Unlike passwords, " +"there is no way to retrieve a lost codename." +msgstr "" +"Wir nutzen keines der traditionellen Mittel, um die Nutzer unseres " +"<strong>SecureDrop</strong>-Dienstes zu verfolgen.\n" +"Daher ist dieser Deckname die einzige Möglichkeit, um in Zukunft mit Ihnen " +"zu kommunizieren, sollten wir Fragen haben oder an weiteren Dokumenten " +"interessiert sein.\n" +"Anders als bei Passwörtern gibt es keine Möglichkeit, einen verlorenen " +"Decknamen wiederherzustellen." + +#: source_templates/generate.html:44 source_templates/index.html:65 +msgid "SUBMIT DOCUMENTS" +msgstr "DOKUMENTE VERSENDEN" + +#: source_templates/index.html:17 +msgid "" +"<strong>We recommend turning the Security Slider to High to protect your " +"anonymity:</strong> <a id=\"disable-js\" href=\"\">Learn how to set it to " +"high</a>, or ignore this warning to continue." +msgstr "" +"<strong>Wir empfehlen, die Sicherheitseinstellungen auf 'Hoch' zu stellen, " +"um Ihre Anonymität zu schützen:</strong><a id=\"disable-js\" href=" +"\"\">Erfahren Sie, wie man ihn auf hoch einstellt</a> oder ignorieren Sie " +"diese Warnung, um fortzufahren." + +#: source_templates/index.html:18 +msgid "" +"<strong>We recommend using Tor Browser to access SecureDrop:</strong> <a id=" +"\"recommend-tor\" href=\"{tor_browser_url}\">Learn how to install it</a>, or " +"ignore this warning to continue." +msgstr "" +"<strong>Wir empfehlen die Nutzung des Tor-Browsers für den Zugriff auf " +"SecureDrop:</strong> <a id=\"recommend-tor\" href=" +"\"{tor_browser_url}\">Erfahren Sie, wie Sie ihn installieren</a> oder " +"ignorieren Sie diese Warnung, um fortzufahren." + +#: source_templates/index.html:41 +msgid "Submit documents for the first time" +msgstr "Zum ersten Mal Dokumente senden" + +#: source_templates/index.html:48 +msgid "Already submitted something?" 
+msgstr "Haben Sie bereits etwas versendet?" + +#: source_templates/index.html:54 +msgid "" +"If this is your first time submitting documents to journalists, start here." +msgstr "" +"Wenn Sie zum ersten Mal Dokumente an Journalisten versenden, starten Sie " +"hier." + +#: source_templates/index.html:58 +msgid "" +"If you have already submitted documents in the past, log in here to check " +"for responses." +msgstr "" +"Wenn Sie bereits in der Vergangenheit Dokumente versendet haben, melden Sie " +"sich hier an, um nach Antworten zu schauen." + +#: source_templates/index.html:73 +msgid "CHECK FOR A RESPONSE" +msgstr "NACH EINER ANTWORT SCHAUEN" + +#: source_templates/index.html:92 +msgid "" +"You appear to be using the Tor Browser. You can turn the Security Slider to " +"High in 4 easy steps!" +msgstr "" +"Sie scheinen den Tor-Browser zu verwenden. In 4 einfachen Schritten können " +"Sie Ihre Sicherheitseinstellungen auf 'Hoch' stellen!" + +#: source_templates/index.html:94 +msgid "" +"Click the <img src=\"{icon}\" alt=\"Tor icon\"> Tor icon in the toolbar above" +msgstr "" +"Klicken Sie auf das <img src=\"{icon}\" alt=\"Tor-Symbol\"> Tor-Symbol in " +"der Werkzeugleiste oben" + +#: source_templates/index.html:95 +msgid "Click <strong>Security Settings...</strong>" +msgstr "Klicken Sie auf <strong>Sicherheitseinstellungen...</strong>" + +#: source_templates/index.html:96 +msgid "" +"Turn the Slider to <strong>High</strong>, then click <strong>Ok</strong>" +msgstr "" +"Stellen Sie den Schieber auf <strong>'Hoch'</strong>, klicken Sie dann " +"<strong>Ok</strong>" + +#: source_templates/index.html:97 +msgid "<a href=\"/\">Click here</a> to refresh the page" +msgstr "<a href=\"/\">Klicken Sie hier</a>, um die Seite zu aktualisieren" + +#: source_templates/login.html:6 +msgid "Enter Codename" +msgstr "Deckname eingeben" + +#: source_templates/login.html:12 +msgid "Enter your codename" +msgstr "Geben Sie Ihren Decknamen ein" + +#: source_templates/login.html:25 source_templates/lookup.html:47 +msgid "CANCEL" +msgstr "ABBRECHEN" + +#: source_templates/logout_flashed_message.html:4 +#: source_templates/session_timeout.html:5 +msgid "Important!" +msgstr "Wichtig!" + +#: source_templates/logout_flashed_message.html:5 +msgid "" +"Thank you for exiting your session! Please select \"New Identity\" from the " +"green onion button in the Tor browser to clear all history of your " +"SecureDrop usage from this device." +msgstr "" +"Danke, dass Sie Ihre Sitzung beendet haben! Bitte wählen Sie mit der grünen " +"Onion-Schaltfläche in Ihrem Tor-Browser \"Neue Identität\" aus, um den " +"gesamten Verlauf Ihrer SecureDrop-Nutzung von diesem Gerät zu löschen." + +#: source_templates/lookup.html:11 +msgid "Whew, it’s you! Now, the embarrassing part..." +msgstr "Puh, Sie sind es! Nun zum peinlichen Teil..." + +#: source_templates/lookup.html:12 +msgid "" +"Our servers experienced an unusual surge of new activity, when you last " +"visited. To err on the side of caution, we put a hold on sending all " +"documents from that day through to our journalists." +msgstr "" +"Bei Ihrem letzten Besuch haben unsere Server eine ungewöhliche " +"Aktivitätssteigerung entdeckt. Um auf Nummer sicher zu gehen, haben wir das " +"Versenden aller Dokumente an unsere Journalisten von diesem Tag an " +"aufgehalten." + +#: source_templates/lookup.html:14 +msgid "" +"Now that we know you’re really a human, though, we’ll get your previous " +"submission into the hands of a journalist straight away. We’re sorry for the " +"delay. 
Please do check back again in a week or so." +msgstr "" +"Nun da wir wissen, dass Sie ein echter Mensch sind, werden wir Ihre " +"vorherige Einreichung sofort an unsere Journalisten weitergeben. Wir " +"entschuldigen uns für die Verzögerung. Kommen Sie in etwa einer Woche zurück " +"und schauen Sie noch einmal nach." + +#: source_templates/lookup.html:20 +msgid "Submit Materials" +msgstr "Material einreichen" + +#: source_templates/lookup.html:21 +msgid "" +"If you are already familiar with GPG, you can optionally encrypt your files " +"and messages with our <a href=\"{url}\" class=\"text-link\">public key</a> " +"before submission. Files are encrypted as they are received by SecureDrop." +msgstr "" +"Wenn Sie bereits mit GPG vertraut sind, können Sie Ihre Dateien und " +"Nachrichten vor dem Versenden zusätzlich mit unserem <a href=\"{url}\" class=" +"\"text-link\">öffentlichen Schlüssel</a> verschlüsseln. Die Dateien werden " +"verschlüsselt, wenn sie von SecureDrop empfangen werden." + +#: source_templates/lookup.html:22 +msgid "<a href=\"{url}\" class=\"text-link\">Learn more</a>." +msgstr "<a href=\"{url}\" class=\"text-link\">Erfahren Sie mehr</a>." + +#: source_templates/lookup.html:24 +msgid "You can send a file, a message, or both." +msgstr "Sie können eine Datei, eine Nachricht oder beides senden." + +#: source_templates/lookup.html:33 +msgid "Maximum upload size: 500 MB" +msgstr "Maximale Upload-Größe: 500 MB" + +#: source_templates/lookup.html:36 +msgid "Write a message." +msgstr "Nachricht schreiben." + +#: source_templates/lookup.html:53 +msgid "Read Replies" +msgstr "Antworten lesen" + +#: source_templates/lookup.html:58 +msgid "" +"You have received a reply. To protect your identity in the unlikely event " +"someone learns your codename, please delete all replies when you're done " +"with them. This also lets us know that you are aware of our reply. You can " +"respond by submitting a new message above." +msgstr "" +"Sie haben eine Antwort erhalten. Um Ihre Identität zu schützen, falls jemand " +"unwahrscheinlicherweise Ihren Decknamen erfährt, löschen Sie bitte alle " +"Antworten, sobald sie gelesen sind. Dann bekommen auch wir mit, dass Sie die " +"Antwort erhalten haben. Sie können antworten, indem Sie oben eine neue " +"Nachricht versenden." + +#: source_templates/lookup.html:71 +msgid "Delete this reply?" +msgstr "Diese Antwort löschen?" + +#: source_templates/lookup.html:73 +msgid "DELETE" +msgstr "LÖSCHEN" + +#: source_templates/lookup.html:82 +msgid "DELETE ALL REPLIES" +msgstr "ALLE ANTWORTEN LÖSCHEN" + +#: source_templates/lookup.html:85 +msgid "Are you finished with the replies?" +msgstr "Sind Sie mit den Antworten fertig?" + +#: source_templates/lookup.html:86 +msgid "YES, DELETE ALL REPLIES" +msgstr "JA, ALLE ANTWORTEN LÖSCHEN" + +#: source_templates/lookup.html:87 +msgid "NO, NOT YET" +msgstr "NEIN, NICHT JETZT" + +#: source_templates/lookup.html:91 +msgid "There are no replies at this time." +msgstr "Zur Zeit gibt es keine Antworten." + +#: source_templates/lookup.html:100 +msgid "Remember, your codename is:" +msgstr "Denken Sie daran, Ihr Deckname ist:" + +#: source_templates/lookup.html:101 +msgid "Show" +msgstr "Anzeigen" + +#: source_templates/lookup.html:103 +msgid "Hide" +msgstr "Verstecken" + +#: source_templates/notfound.html:3 +msgid "Page not found" +msgstr "Seite nicht gefunden" + +#: source_templates/notfound.html:5 +msgid "Sorry, we couldn't locate what you requested." +msgstr "Leider konnten wir nicht finden, was Sie angefordert haben."
+ +#: source_templates/session_timeout.html:6 +msgid "" +"Your session timed out due to inactivity. Please login again if you want to " +"continue using SecureDrop, or select \"New Identity\" from the green onion " +"button in the Tor browser to clear all history of your SecureDrop usage from " +"this device. If you are not using Tor Browser, restart your browser." +msgstr "" +"Ihre Sitzung ist aufgrund von Inaktivität abgelaufen. Bitte melden Sie sich " +"erneut an, wenn Sie SecureDrop weiterhin verwenden möchten, oder wählen Sie " +"mit der grünen Onion-Schaltfläche in Ihrem Tor-Browser \"Neue Identität\" " +"aus, um den gesamten Verlauf Ihrer SecureDrop-Nutzung von diesem Gerät zu " +"löschen. Wenn Sie den Tor-Browser nicht nutzen, starten Sie Ihren Browser neu." + +#: source_templates/tor2web-warning.html:3 +msgid "Why is there a warning about Tor2Web?" +msgstr "Warum gibt es eine Warnung über Tor2Web?" + +#: source_templates/tor2web-warning.html:4 +msgid "" +"<a href=\"tor2web.org\">Tor2Web</a> is a proxy service that lets you browse " +"Tor Hidden Services (.onion sites) without installing Tor. It was designed " +"to facilitate anonymous publishing." +msgstr "" +"<a href=\"tor2web.org\">Tor2Web</a> ist ein Proxy-Dienst, der Ihnen " +"ermöglicht, Tor Hidden Services (.onion-Seiten) zu besuchen, ohne Tor zu " +"installieren. Es wurde entworfen, um anonymes Veröffentlichen zu " +"vereinfachen." + +#: source_templates/tor2web-warning.html:5 +msgid "" +"Tor2Web only protects publishers, not readers. If you upload documents to us " +"using Tor2Web, you are <strong>not anonymous</strong> and could be " +"identified by your ISP or the Tor2Web proxy operators. Additionally, since " +"Tor2Web sites typically do not use HTTPS, it is possible that your " +"connection could be MITM'ed by a capable adversary." +msgstr "" +"Tor2Web schützt nur den Veröffentlichenden, nicht die Leser. Wenn Sie mit " +"Tor2Web Dokumente zu uns hochladen, sind Sie <strong>nicht anonym</strong> " +"und könnten von Ihrem Anbieter oder den Tor2Web-Proxy-Betreibern " +"identifiziert werden. Da Tor2Web-Seiten in der Regel kein HTTPS nutzen, kann " +"es auch sein, dass Ihre Verbindung durch einen Man-in-the-Middle-Angriff " +"abgefangen wird." + +#: source_templates/tor2web-warning.html:6 +msgid "" +"If you want to submit information, you are <strong>strongly advised</strong> " +"to install <a href=\"torproject.org/download.html.en\">Tor</a> and use it to " +"access our site safely and anonymously." +msgstr "" +"Wenn Sie Informationen versenden wollen, empfehlen wir Ihnen " +"<strong>dringend</strong>, <a href=\"torproject.org/download.html.en\">Tor</" +"a> zu installieren und zu nutzen, um sicher und anonym auf unsere Seite " +"zuzugreifen." + +#: source_templates/use-tor-browser.html:3 +msgid "You Should Use Tor Browser" +msgstr "Sie sollten den Tor-Browser benutzen" + +#: source_templates/use-tor-browser.html:4 +msgid "" +"If you are not using Tor Browser, you <strong>may not be anonymous</strong>." +msgstr "" +"Wenn Sie nicht den Tor-Browser nutzen, dann könnten Sie <strong>nicht " +"anonym</strong> sein." + +#: source_templates/use-tor-browser.html:5 +msgid "" +"If you want to submit information to SecureDrop, we <strong>strongly advise " +"you</strong> to install Tor Browser and use it to access our site safely and " +"anonymously."
+msgstr "" +"Wenn Sie Informationen an SecureDrop senden wollen, empfehlen wir Ihnen " +"<strong>dringend</strong>, den Tor-Browser zu installieren und zu nutzen, um " +"sicher und anonym auf unsere Seite zuzugreifen." + +#: source_templates/use-tor-browser.html:6 +msgid "" +"Copy and paste the following address into your browser and follow the " +"instructions to download and install Tor Browser:" +msgstr "" +"Kopieren Sie die folgende Adresse in Ihren Browser und folgen Sie der " +"Anleitung, um den Tor-Browser herunterzuladen und zu installieren:" + +#: source_templates/use-tor-browser.html:9 +msgid "" +"If there is a chance that downloading the Tor Browser raises suspicion and " +"your mail provider is less likely to be monitored, you can send a mail to " +"<pre>[email protected]</pre> and a bot will answer with instructions." +msgstr "" +"Wenn der Download des Tor-Browsers einen Verdacht erwecken könnte, und ihr " +"Mail-Provider weniger überwacht wird, können Sie einen E-Mail an " +"<pre>[email protected]</pre> schicken. Ein Roboter werdet mit " +"Anweisungen antworten." + +#: source_templates/why-journalist-key.html:3 +msgid "Why download the journalist's public key?" +msgstr "Warum den öffentlichen Schlüssel des Journalisten herunterladen?" + +#: source_templates/why-journalist-key.html:4 +msgid "" +"SecureDrop encrypts files and messages after they are submitted. Encrypting " +"messages and files before submission can provide an extra layer of security " +"before your data reaches the SecureDrop server." +msgstr "" +"SecureDrop verschlüsselt Dateien und Nachrichten nach dem Senden. Die " +"Verschlüsselung von Nachrichten und Dateien vor der Übermittlung kann eine " +"zusätzliche Sicherheitsschicht bieten, bevor Ihre Daten den SecureDrop-" +"Server erreichen." + +#: source_templates/why-journalist-key.html:5 +msgid "" +"If you are already familiar with the GPG encryption software, you may wish " +"to encrypt your submissions yourself. To do so:" +msgstr "" +"Wenn Sie bereits mit der GPG-Verschlüsselungssoftware vertraut sind, können " +"Sie Ihre Einreichungen selbst verschlüsseln. Machen Sie es wie so:" + +#: source_templates/why-journalist-key.html:7 +msgid "" +"<a href=\"{url}\">Download</a> the public key. The public key is a text file " +"with the extension <code>.asc</code>" +msgstr "" +"Laden Sie den öffentlichen Schlüssel <a href=\"{url}\">herunter</a>. Der " +"öffentliche Schlüssel ist eine Textdatei mit der Endung <code>.asc</code>" + +#: source_templates/why-journalist-key.html:8 +msgid "Import it into your GPG keyring." +msgstr "Importieren Sie ihn in Ihren GPG-Schlüsselbund." + +#: source_templates/why-journalist-key.html:10 +msgid "" +"If you are using <a href=\"{url}\">Tails</a>, you can double-click the " +"<code>.asc</code> file you just downloaded and it will be automatically " +"imported to your keyring." +msgstr "" +"Wenn Sie <a href=\"{url}\">Tails</a> nutzen, können Sie doppelt auf die " +"<code>.asc</code>-Datei klicken, die Sie gerade heruntergeladen haben. Sie " +"wird dann automatisch zu ihrem Schlüsselbund importiert." + +#: source_templates/why-journalist-key.html:11 +msgid "" +"If you are using Mac/Linux, open the terminal. You can import the key with " +"<code>gpg --import /path/to/key.asc</code>." +msgstr "" +"Wenn Sie Mac/Linux verwenden, öffnen Sie das Terminal. Sie können den " +"Schlüssel mit <code>gpg --import /path/to/key.asc</code> importieren." + +#: source_templates/why-journalist-key.html:14 +msgid "Encrypt your submission." 
+msgstr "Verschlüsseln Sie Ihre Einreichung." + +#: source_templates/why-journalist-key.html:16 +msgid "" +"You will need to be able to identify the key (this is called the \"user ID\" " +"or UID). Since the public key's filename is the key's fingerprint (with .asc " +"at the end), you can just copy and paste that. (don't include the <code>." +"asc</code>!)" +msgstr "" +"Sie müssen den Schlüssel identifizieren können (wird als \"Benutzerkennung\" " +"oder UID bezeichnet). Da der Dateiname des öffentlichen Schlüssels der " +"Fingerabdruck des Schlüssels ist (mit. asc am Ende), können Sie ihn einfach " +"kopieren und einfügen. (ziehen Sie <code>.asc</code> nicht mitein!)" + +#: source_templates/why-journalist-key.html:17 +msgid "" +"On all systems, open the Terminal and use this gpg command: <code>gpg --" +"recipient &lt;user ID&gt; --encrypt roswell_photos.pdf</code>" +msgstr "" +"Auf allen Systemen, öffnen Sie das Terminal und benutzen Sie diesen gpg-" +"Befehl: <code>gpg --recipient &lt;Benutzerkennung&gt; --encrypt " +"bielefeld_fotos.pdf</code>" + +#: source_templates/why-journalist-key.html:20 +msgid "" +"Upload your encrypted submission. It will have the same filename as the " +"unencrypted file, with .gpg at the end (e.g. <code>roswell_photos.pdf.gpg</" +"code>)" +msgstr "" +"Laden Sie ihre verschlüsselte Einreichung hoch. Sie wird den gleichen " +"Dateinamen haben wie die unverschlüsselte Datei, aber mit .gpg am Ende (z.B. " +"<code>bielefeld_fotos.pdf.gpg</code>)" + +#: source_templates/why-journalist-key.html:23 +msgid "" +"<strong>Tip:</strong> If you wish to remain anonymous, <strong>do not</" +"strong> use GPG to sign the encrypted file (with the <code>--sign</code> or " +"<code>-s</code> flag) as this will reveal your GPG identity to us." +msgstr "" +"<strong>Hinweis:</strong> Verwenden Sie <strong>nicht</strong> GPG, um die " +"verschlüsselte Datei zu signieren (mit der Option <code>--sign</code> oder " +"<code>-s</code>), denn das würde Ihre Identität verraten." + +#: source_templates/why-journalist-key.html:25 +msgid "Back to submission page" +msgstr "Zurück zur Einreichungsseite" + +#~ msgid "{doc_num} docs" +#~ msgstr "{doc_num} Dok." + +#~ msgid "{msg_num} messages" +#~ msgstr "{msg_num} Nachr." + +#~ msgid "Enable Google Authenticator" +#~ msgstr "Google Authenicator aktivieren" + +#~ msgid "Open the Google Authenticator app" +#~ msgstr "Öffnen Sie die Google Authenticator App" + +#~ msgid "Tap menu, then tap \"Set up account\", then tap \"Scan a barcode\"" +#~ msgstr "" +#~ "Tippen Sie auf Menü, tippen Sie anscließend auf \"Konto einrichten\", " +#~ "dann auf \"Barcode scannen\"" + +#~ msgid "USE NEW CODENAME" +#~ msgstr "NUTZE EIN NEUER DECKNAMEN" + +#~ msgid "USE EXISTING CODENAME" +#~ msgstr "NUTZE VORHANDENE DECKNAME" + +#~ msgid "messages {msg_num}" +#~ msgstr "Nach. {msg_num}" diff --git a/securedrop/tests/i18n/securedrop/translations/nl/LC_MESSAGES/messages.po b/securedrop/tests/i18n/securedrop/translations/nl/LC_MESSAGES/messages.po new file mode 100644 --- /dev/null +++ b/securedrop/tests/i18n/securedrop/translations/nl/LC_MESSAGES/messages.po @@ -0,0 +1,1378 @@ +# Dutch translations for SecureDrop. +# Copyright (C) 2017 Freedom of the Press Foundation +# This file is distributed under the same license as the SecureDrop project. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. 
+# +msgid "" +msgstr "" +"Project-Id-Version: SecureDrop 0.3.12\n" +"Report-Msgid-Bugs-To: [email protected]\n" +"POT-Creation-Date: 2017-09-02 07:28+0000\n" +"PO-Revision-Date: 2018-01-10 08:59+0000\n" +"Last-Translator: kwadronaut <[email protected]>\n" +"Language-Team: Dutch " +"<https://weblate.securedrop.org/projects/securedrop/securedrop/nl/>\n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" +"X-Generator: Weblate 2.18\n" +"Generated-By: Babel 2.4.0\n" + +#: template_filters.py:14 +msgid "{time} ago" +msgstr "{time} geleden" + +#: journalist_app/__init__.py:34 journalist_app/__init__.py:76 +msgid "You have been logged out due to inactivity" +msgstr "U bent uitgelogd wegens inactiviteit" + +#: journalist_app/account.py:26 +msgid "Incorrect password or two-factor code." +msgstr "Foutief wachtwoord of tweestapstoken." + +#: journalist_app/account.py:42 +msgid "Token in two-factor authentication verified." +msgstr "Token voor tweestapsauthenticatie succesvol geverifieerd." + +#: journalist_app/account.py:46 journalist_app/admin.py:108 +msgid "Could not verify token in two-factor authentication." +msgstr "Token voor tweestapsauthenticatie kan niet worden gecontroleerd." + +#: journalist_app/admin.py:37 +msgid "Image updated." +msgstr "Afbeelding is bijgewerkt." + +#: journalist_app/admin.py:63 +msgid "" +"There was an error with the autogenerated password. User not created. Please " +"try again." +msgstr "" +"Er is een probleem opgetreden met het automatisch gegenereerde wachtwoord. " +"Gebruiker niet aangemaakt. Probeert opnieuw." + +#: journalist_app/admin.py:74 +msgid "That username is already in use" +msgstr "Deze gebruikersnaam bestaat al" + +#: journalist_app/admin.py:77 +msgid "" +"An error occurred saving this user to the database. Please inform your " +"administrator." +msgstr "" +"Er is een probleem opgetreden bij het opslaan van deze gebruiker in de " +"database. Neem contact op met de beheerder." + +#: journalist_app/admin.py:101 +msgid "Token in two-factor authentication accepted for user {user}." +msgstr "" +"Token voor tweestapsauthenticatie is geaccepteerd voor gebruiker {user}." + +#: journalist_app/admin.py:135 +msgid "Invalid secret format: please only submit letters A-F and numbers 0-9." +msgstr "" +"Ongeldige samenstelling voor geheim: gebruik uitsluitend letters A-F en " +"cijfers 0-9." + +#: journalist_app/admin.py:140 +msgid "Invalid secret format: odd-length secret. Did you mistype the secret?" +msgstr "" +"Ongeldig geheim: oneven lengte van geheim. Is het geheim correct gespeld?" + +#: journalist_app/admin.py:145 journalist_app/main.py:107 +#: journalist_app/utils.py:38 +msgid "An unexpected error occurred! Please inform your administrator." +msgstr "" +"Er is een onverwacht probleem opgetreden! Neem contact op met de beheerder." + +#: journalist_app/admin.py:178 +msgid "Username \"{user}\" already taken." +msgstr "Gebruikersnaam \"{user}\" is reeds in gebruik." + +#: journalist_app/admin.py:214 +msgid "Deleted user '{user}'" +msgstr "Gebruiker '{user}' verwijderd" + +#: journalist_app/col.py:47 +msgid "{source_name}'s collection deleted" +msgstr "{source_name}-collectie verwijderd" + +#: journalist_app/col.py:58 +msgid "No collections selected." +msgstr "Geen collecties geselecteerd." + +#: journalist_app/decorators.py:16 +msgid "Only administrators can access this page." +msgstr "U moet beheerder zijn om toegang tot deze pagina te krijgen." 
+
+#: journalist_app/forms.py:19
+msgid "Field must be 40 characters long but got {num_chars}."
+msgstr "Veld moet 40 tekens lang zijn, maar bevat er {num_chars}."
+
+#: journalist_app/forms.py:30
+msgid ""
+"Field must be at least {min_chars} characters long but only got {num_chars}."
+msgstr ""
+"Veld moet minstens {min_chars} karakters lang zijn, maar bevat er slechts "
+"{num_chars}."
+
+#: journalist_app/forms.py:35 source_app/forms.py:11
+msgid "This field is required."
+msgstr "Dit is een verplicht veld."
+
+#: journalist_app/forms.py:52
+msgid "You cannot send an empty reply."
+msgstr "U kunt geen leeg antwoord versturen."
+
+#: journalist_app/forms.py:60
+msgid "File required."
+msgstr "Bestand verplicht."
+
+#: journalist_app/forms.py:62
+msgid "Upload images only."
+msgstr "Enkel afbeeldingen uploaden."
+
+#: journalist_app/main.py:118
+msgid "Thanks. Your reply has been stored."
+msgstr "Bedankt. Uw antwoord is opgeslagen."
+
+#: journalist_app/main.py:139
+msgid "No collections selected for download."
+msgstr "Geen collecties geselecteerd om te downloaden."
+
+#: journalist_app/main.py:142 journalist_app/utils.py:189
+msgid "No collections selected for deletion."
+msgstr "Geen collecties geselecteerd om te verwijderen."
+
+#: journalist_app/main.py:168
+msgid "The source '{original_name}' has been renamed to '{new_name}'"
+msgstr "De bron '{original_name}' is hernoemd naar '{new_name}'"
+
+#: journalist_app/main.py:183
+msgid "No unread submissions for this source."
+msgstr "Geen ongelezen inzendingen van deze bron."
+
+#: journalist_app/utils.py:45
+msgid "Account updated."
+msgstr "Account is bijgewerkt."
+
+#: journalist_app/utils.py:76
+msgid "Login failed."
+msgstr "Inloggen mislukt."
+
+#: journalist_app/utils.py:84
+msgid "Please wait at least {seconds} second before logging in again."
+msgid_plural "Please wait at least {seconds} seconds before logging in again."
+msgstr[0] ""
+"Wacht minimaal {seconds} seconde voordat u opnieuw probeert in te loggen."
+msgstr[1] ""
+"Wacht minimaal {seconds} seconden voordat u opnieuw probeert in te loggen."
+
+#: journalist_app/utils.py:95
+msgid "Please wait for a new two-factor token before trying again."
+msgstr "Wacht op een nieuw tweestapstoken alvorens opnieuw te proberen."
+
+#: journalist_app/utils.py:141
+msgid "Submission deleted."
+msgid_plural "{num} submissions deleted."
+msgstr[0] "Inzending verwijderd."
+msgstr[1] "{num} inzendingen verwijderd."
+
+#: journalist_app/utils.py:194
+msgid "{num} collection deleted"
+msgid_plural "{num} collections deleted"
+msgstr[0] "{num} collectie verwijderd"
+msgstr[1] "{num} collecties verwijderd"
+
+#: journalist_app/utils.py:229
+msgid "You submitted a bad password! Password not changed."
+msgstr "U heeft een ongeldig wachtwoord ingevoerd! Wachtwoord niet gewijzigd."
+
+#: journalist_app/utils.py:236
+msgid ""
+"There was an error, and the new password might not have been saved "
+"correctly. To prevent you from getting locked out of your account, you "
+"should reset your password again."
+msgstr ""
+"Er is een probleem opgetreden en het nieuwe wachtwoord is mogelijk niet "
+"correct opgeslagen. Om te voorkomen dat u niet meer kunt inloggen adviseren "
+"wij om uw wachtwoord opnieuw te herstellen."
+
+#: journalist_app/utils.py:245
+msgid ""
+"Password updated. Don't forget to save it in your KeePassX database. New "
+"password:"
+msgstr ""
+"Wachtwoord bijgewerkt. Vergeet niet om het op te slaan in uw KeePassX "
+"database. Nieuw wachtwoord:"
+
+#: journalist_app/utils.py:262
+msgid "No unread submissions in selected collections."
+msgstr "Geen ongelezen inzendingen in de geselecteerde collecties."
+
+#: journalist_templates/_source_row.html:19
+msgid "1 doc"
+msgid_plural "{doc_num} docs"
+msgstr[0] "1 doc"
+msgstr[1] "{doc_num} docs"
+
+#: journalist_templates/_source_row.html:20
+msgid "1 message"
+msgid_plural "{msg_num} messages"
+msgstr[0] "1 bericht"
+msgstr[1] "{msg_num} berichten"
+
+#: journalist_templates/_source_row.html:23
+msgid "{num_unread} unread"
+msgstr "{num_unread} ongelezen"
+
+#: journalist_templates/account_edit_hotp_secret.html:6
+#: journalist_templates/admin_edit_hotp_secret.html:7
+msgid "Change Secret"
+msgstr "Verander geheim"
+
+#: journalist_templates/account_edit_hotp_secret.html:7
+#: journalist_templates/admin_add_user.html:33
+#: journalist_templates/admin_edit_hotp_secret.html:8
+msgid "HOTP Secret"
+msgstr "HOTP Geheim"
+
+#: journalist_templates/account_edit_hotp_secret.html:9
+#: journalist_templates/admin_edit_hotp_secret.html:10
+#: source_templates/login.html:23
+msgid "CONTINUE"
+msgstr "DOORGAAN"
+
+#: journalist_templates/account_new_two_factor.html:4
+#: journalist_templates/admin_new_user_two_factor.html:5
+msgid "Enable FreeOTP"
+msgstr "FreeOTP activeren"
+
+#: journalist_templates/account_new_two_factor.html:5
+msgid ""
+"You're almost done! To finish resetting your two-factor authentication, "
+"follow the instructions below to set up FreeOTP. Once you've added the entry "
+"for your account in the app, enter one of the 6-digit codes from the app to "
+"confirm that two factor authentication is set up correctly."
+msgstr ""
+"U bent bijna klaar! Om het resetten van uw tweestapsauthenticatie te "
+"voltooien, volgt u de onderstaande instructies om FreeOTP in te stellen. "
+"Zodra u uw account in de app hebt toegevoegd, voert u een van de "
+"zescijferige codes uit de app in om te bevestigen dat de "
+"tweestapsauthenticatie correct is ingesteld."
+
+#: journalist_templates/account_new_two_factor.html:8
+#: journalist_templates/admin_new_user_two_factor.html:9
+msgid "Install FreeOTP on your phone"
+msgstr "Installeer FreeOTP op uw telefoon"
+
+#: journalist_templates/account_new_two_factor.html:9
+#: journalist_templates/admin_new_user_two_factor.html:10
+msgid "Open the FreeOTP app"
+msgstr "Open de FreeOTP app"
+
+#: journalist_templates/account_new_two_factor.html:10
+#: journalist_templates/admin_new_user_two_factor.html:11
+msgid "Tap the QRcode symbol at the top"
+msgstr "Tik op de QR code bovenaan"
+
+#: journalist_templates/account_new_two_factor.html:11
+#: journalist_templates/admin_new_user_two_factor.html:12
+msgid ""
+"Your phone will now be in \"scanning\" mode. When you are in this mode, scan "
+"the barcode below:"
+msgstr ""
+"Uw telefoon staat nu in \"scan\"-modus. Zodra u in deze modus zit, scan dan "
+"de onderstaande barcode:"
+
+#: journalist_templates/account_new_two_factor.html:14
+#: journalist_templates/admin_new_user_two_factor.html:15
+msgid "Can't scan the barcode? Enter the following code manually:"
+msgstr ""
+"Lukt het niet de code te scannen? 
Voer dan handmatig de volgende code in:" + +#: journalist_templates/account_new_two_factor.html:15 +#: journalist_templates/admin_new_user_two_factor.html:18 +msgid "Once you have scanned the barcode, enter the 6-digit code below:" +msgstr "" +"Zodra u de barcode gescand hebt, voer dan hieronder de zescijferige code in:" + +#: journalist_templates/account_new_two_factor.html:17 +#: journalist_templates/admin_new_user_two_factor.html:20 +msgid "Enable YubiKey (OATH-HOTP)" +msgstr "YubiKey activeren (OATH-HOTP)" + +#: journalist_templates/account_new_two_factor.html:18 +#: journalist_templates/admin_new_user_two_factor.html:21 +msgid "Once you have configured your YubiKey, enter the 6-digit code below:" +msgstr "" +"Zodra u uw YubiKey geconfigureerd heeft, voer de zescijferige code hieronder " +"in:" + +#: journalist_templates/account_new_two_factor.html:22 +#: journalist_templates/admin_new_user_two_factor.html:25 +msgid "Verification code" +msgstr "Verificatiecode" + +#: journalist_templates/account_new_two_factor.html:24 +#: journalist_templates/admin_new_user_two_factor.html:27 +#: journalist_templates/col.html:83 source_templates/lookup.html:45 +msgid "SUBMIT" +msgstr "VERSTUREN" + +#: journalist_templates/admin.html:3 +msgid "Admin Interface" +msgstr "Beheerdersinterface" + +#: journalist_templates/admin.html:6 +#: journalist_templates/admin_add_user.html:39 +msgid "ADD USER" +msgstr "GEBRUIKER TOEVOEGEN" + +#: journalist_templates/admin.html:16 +#: journalist_templates/admin_add_user.html:14 +#: journalist_templates/login.html:8 +msgid "Username" +msgstr "Gebruikersnaam" + +#: journalist_templates/admin.html:17 +msgid "Edit" +msgstr "Bewerken" + +#: journalist_templates/admin.html:18 journalist_templates/index.html:15 +msgid "Delete" +msgstr "Verwijderen" + +#: journalist_templates/admin.html:19 +msgid "Created" +msgstr "Aangemaakt" + +#: journalist_templates/admin.html:20 +msgid "Last login" +msgstr "Laatste login" + +#: journalist_templates/admin.html:25 +msgid "Edit user {username}" +msgstr "Gebruiker {username} bewerken" + +#: journalist_templates/admin.html:26 +msgid "Delete user {username}" +msgstr "Gebruiker {username} verwijderen" + +#: journalist_templates/admin.html:31 +msgid "never" +msgstr "nooit" + +#: journalist_templates/admin.html:38 +msgid "No users to display" +msgstr "Geen gebruikers om weer te geven" + +#: journalist_templates/admin.html:44 +msgid "INSTANCE CONFIG" +msgstr "CONFIGURATIE INSTANTIE" + +#: journalist_templates/admin_add_user.html:4 +#: journalist_templates/config.html:4 journalist_templates/edit_account.html:7 +msgid "Back to admin interface" +msgstr "Terug naar beheerdersinterface" + +#: journalist_templates/admin_add_user.html:21 +msgid "The user's password will be:" +msgstr "Het wachtwoord van de gebruiker zal zijn:" + +#: journalist_templates/admin_add_user.html:24 +#: journalist_templates/edit_account.html:17 +msgid "Is Administrator" +msgstr "Is Beheerder" + +#: journalist_templates/admin_add_user.html:32 +msgid "Is using a YubiKey [HOTP]" +msgstr "Gebruikt een YubiKey [HOTP]" + +#: journalist_templates/admin_new_user_two_factor.html:6 +msgid "" +"You're almost done! To finish adding this new user, have them follow the " +"instructions below to set up two-factor authentication with FreeOTP. Once " +"they've added an entry for this account in the app, have them enter one of " +"the 6-digit codes from the app to confirm that two factor authentication is " +"set up correctly." +msgstr "" +"U bent bijna klaar! 
Om het toevoegen van deze nieuwe gebruiker af te ronden, " +"laat hem/haar onderstaande instructies volgen om de tweestapsauthenticatie " +"met FreeOTP in te stellen. Zodra hij/zij dit account heeft toegevoegd, laat " +"hem/haar vervolgens de zescijferige code uit de app invoeren om te " +"bevestigen dat de tweestapsauthenticatie succesvol is ingericht." + +#: journalist_templates/base.html:24 +msgid "Logged on as" +msgstr "Aangemeld als" + +#: journalist_templates/base.html:26 +msgid "Admin" +msgstr "Beheerder" + +#: journalist_templates/base.html:28 +msgid "Log Out" +msgstr "Uitloggen" + +#: journalist_templates/base.html:40 +msgid "" +"Powered by <br> <img src=\"/static/i/securedrop_small.png\" alt=\"SecureDrop" +"\">" +msgstr "" +"Mogelijk gemaakt door <br> <img src=\"/static/i/securedrop_small.png\" alt=" +"\"SecureDrop\">" + +#: journalist_templates/base.html:54 +msgid "Powered by <em>SecureDrop {version}</em>." +msgstr "Mogelijk gemaakt door <em>SecureDrop {version}</em>." + +#: journalist_templates/col.html:10 +msgid "All Sources" +msgstr "Alle bronnen" + +#: journalist_templates/col.html:13 +msgid "" +"Generate a new random codename for this source. We recommend doing this if " +"the first random codename is difficult to say or remember. You can generate " +"new random codenames as many times as you like." +msgstr "" +"Genereer een nieuwe willekeurige codenaam voor deze bron. We adviseren dit " +"indien de eerste willekeurige codenaam moeilijk uit te spreken of te " +"onthouden is. U kunt nieuwe willekeurige codenamen genereren zo vaak u wilt." + +#: journalist_templates/col.html:13 +msgid "Change codename" +msgstr "Codenaam veranderen" + +#: journalist_templates/col.html:14 +msgid "Are you sure you want to generate a new codename?" +msgstr "Weet u zeker dat u een nieuwe codenaam wilt genereren?" + +#: journalist_templates/col.html:15 source_templates/lookup.html:72 +msgid "Cancel" +msgstr "Annuleer" + +#: journalist_templates/col.html:16 +msgid "CONFIRM" +msgstr "BEVESTIG" + +#: journalist_templates/col.html:22 +msgid "" +"The documents are stored encrypted for security. To read them, you will need " +"to decrypt them using GPG." +msgstr "" +"De documenten zijn versleuteld opgeslagen. Om de documenten te lezen, moeten " +"ze ontsleuteld worden met behulp van GPG." + +#: journalist_templates/col.html:26 +msgid "Download Selected" +msgstr "Geselecteerde downloaden" + +#: journalist_templates/col.html:27 +msgid "Delete Selected" +msgstr "Geselecteerde verwijderen" + +#: journalist_templates/col.html:55 +msgid "Uploaded Document" +msgstr "Geüpload document" + +#: journalist_templates/col.html:57 journalist_templates/col.html:75 +msgid "Reply" +msgstr "Reageer" + +#: journalist_templates/col.html:59 +msgid "Message" +msgstr "Bericht" + +#: journalist_templates/col.html:70 +msgid "No documents to display." +msgstr "Geen documenten om weer te geven." + +#: journalist_templates/col.html:77 +msgid "" +"You can write a secure reply to the person who submitted these documents:" +msgstr "" +"U kunt beveiligd reageren naar de persoon die deze documenten gestuurd heeft:" + +#: journalist_templates/col.html:86 +msgid "You've flagged this source for reply." +msgstr "U heeft deze bron gemarkeerd om te reageren." + +#: journalist_templates/col.html:87 +msgid "" +"An encryption key will be generated for the source the next time they log " +"in, after which you will be able to reply to the source here." 
+msgstr "" +"Er wordt een encryptiesleutel gegenereerd voor de bron en wanneer deze de " +"volgende keer inlogt, kunt u daarna hier reageren naar de bron." + +#: journalist_templates/col.html:89 +msgid "Click below if you would like to write a reply to this source." +msgstr "Klik hieronder indien u wilt reageren naar deze bron." + +#: journalist_templates/col.html:93 +msgid "FLAG THIS SOURCE FOR REPLY" +msgstr "MARKEER DEZE BRON OM TE REAGEREN" + +#: journalist_templates/col.html:98 +msgid "" +"Click below to delete this source's collection. <em>Warning: If you do this, " +"the files seen here will be unrecoverable and the source will no longer be " +"able to login using their previous codename.</em>" +msgstr "" +"Klik hieronder om de collectie van deze bron te verwijderen. " +"<em>Waarschuwing: Indien de collectie verwijderd wordt, kunnen de documenten " +"niet hersteld worden en zal de bron niet langer kunnen inloggen met zijn/" +"haar vorige codenaam.</em>" + +#: journalist_templates/col.html:104 +msgid "DELETE COLLECTION" +msgstr "COLLECTIE VERWIJDEREN" + +#: journalist_templates/config.html:7 +msgid "Instance Configuration" +msgstr "Instantie configureren" + +#: journalist_templates/config.html:9 +msgid "Alerts" +msgstr "Waarschuwingen" + +#: journalist_templates/config.html:11 +msgid "Send a test encrypted email alert to verify OSSEC is working correctly:" +msgstr "" +"Verstuur bij wijze van test een versleutelde waarschuwingsmail om te " +"controleren of OSSEC correct werkt:" + +#: journalist_templates/config.html:15 +msgid "SEND TEST OSSEC ALERT" +msgstr "VERSTUUR OSSEC WAARSCHUWINGSTESTMAIL" + +#: journalist_templates/config.html:21 +msgid "Logo Image" +msgstr "Logo Afbeelding" + +#: journalist_templates/config.html:23 +msgid "" +"Here you can update the image displayed on the SecureDrop web interfaces:" +msgstr "" +"Hier kan u de afbeelding updaten die getoond wordt in de SecureDrop " +"webinterface:" + +#: journalist_templates/config.html:39 +msgid "UPDATE LOGO" +msgstr "BIJWERKEN LOGO" + +#: journalist_templates/delete.html:5 +msgid "" +"The following file has been selected for <strong>permanent deletion</strong>:" +msgid_plural "" +"The following {files} files have been selected for <strong>permanent " +"deletion</strong>:" +msgstr[0] "" +"Het onderstaande bestand is geselecteerd voor <strong>permanente " +"verwijdering</strong>:" +msgstr[1] "" +"De onderstaande bestanden zijn geselecteerd voor <strong>permanente " +"verwijdering</strong>:" + +#: journalist_templates/delete.html:20 +msgid "PERMANENTLY DELETE FILES" +msgstr "BESTANDEN PERMANENT VERWIJDEREN" + +#: journalist_templates/delete.html:23 +msgid "Return to the list of documents for {source_name}…" +msgstr "Ga terug naar de documentenlijst voor {source_name}…" + +#: journalist_templates/edit_account.html:6 +msgid "Edit user \"{user}\"" +msgstr "Gebruiker \"{user}\" bewerken" + +#: journalist_templates/edit_account.html:8 +msgid "Change Username &amp; Admin Status" +msgstr "Gebruikersnaam & Beheerderstatus wijzigen" + +#: journalist_templates/edit_account.html:12 +msgid "Change username" +msgstr "Gebruikersnaam wijzigen" + +#: journalist_templates/edit_account.html:19 +msgid "UPDATE" +msgstr "BIJWERKEN" + +#: journalist_templates/edit_account.html:22 +msgid "Edit your account" +msgstr "Wijzig uw account" + +#: journalist_templates/edit_account.html:25 +msgid "Reset Password" +msgstr "Herstel wachtwoord" + +#: journalist_templates/edit_account.html:27 +msgid "SecureDrop now uses automatically generated diceware 
passwords." +msgstr "SecureDrop gebruikt nu automatisch gegenereerde diceware wachtwoorden." + +#: journalist_templates/edit_account.html:28 +msgid "" +"Your password will be changed immediately, so you will need to save it " +"before pressing the \"Reset Password\" button." +msgstr "" +"Uw wachtwoord wordt meteen veranderd, dus sla het op voordat u op de " +"\"Herstel Wachtwoord\"-knop drukt." + +#: journalist_templates/edit_account.html:34 +msgid "Please enter your current password and two-factor code." +msgstr "Gelieve je huidige wachtwoord en tweestapstoken in te geven." + +#: journalist_templates/edit_account.html:40 +msgid "Current Password" +msgstr "Huidig wachtwoord" + +#: journalist_templates/edit_account.html:41 journalist_templates/login.html:10 +msgid "Two-factor Code" +msgstr "Tweestapstoken" + +#: journalist_templates/edit_account.html:46 +msgid "The user's password will be changed to:" +msgstr "Het wachtwoord van de gebruiker zal veranderd worden in:" + +#: journalist_templates/edit_account.html:48 +msgid "Your password will be changed to:" +msgstr "Uw wachtwoord wordt gewijzigd in:" + +#: journalist_templates/edit_account.html:53 +msgid "RESET PASSWORD" +msgstr "HERSTEL WACHTWOORD" + +#: journalist_templates/edit_account.html:58 +msgid "Reset Two-Factor Authentication" +msgstr "Herstel tweestapsauthenticatie" + +#: journalist_templates/edit_account.html:61 +msgid "" +"If a user's two-factor authentication credentials have been lost or " +"compromised, you can reset them here. <em>If you do this, make sure the user " +"is present and ready to set up their device with the new two-factor " +"credentials. Otherwise, they will be locked out of their account." +msgstr "" +"Indien gebruiker zijn inloggegevens voor tweestapsauthenticatie kwijt is of " +"mogelijk niet meer veilig, dan kunt u het hier herstellen. <em>Zorg ervoor " +"dat de gebruiker aanwezig is op het moment dat dit gedaan wordt en dat hun " +"apparaat gereed is voor inrichting met een nieuwe tweeledige authenticatie. " +"Anders kan de gebruiker niet meer inloggen op hun account." + +#: journalist_templates/edit_account.html:63 +msgid "" +"If your two-factor authentication credentials have been lost or compromised, " +"or you got a new device, you can reset your credentials here. <em>If you do " +"this, make sure you are ready to set up your new device, otherwise you will " +"be locked out of your account.</em>" +msgstr "" +"Indien uw tweestapsauthenticatie kwijt of gecompromitteerd zijn of u heeft " +"een nieuw apparaat, kunt u uw gegevens hier herstellen. <em>Indien u dit " +"doet, zorg dan dat alles is voorbereid om uw nieuw apparaat in te stellen, " +"anders wordt u buitengesloten van uw account.</em>" + +#: journalist_templates/edit_account.html:65 +msgid "" +"To reset two-factor authentication for mobile apps such as FreeOTP, choose " +"the first option. For hardware tokens like the Yubikey, choose the second." +msgstr "" +"Om tweestapsauthenticatie voor mobiele apps zoals FreeOTP te herstellen, " +"kies de eerste optie. Voor hardware-tokens zoals Yubikey kiest u de tweede." + +#: journalist_templates/edit_account.html:85 +msgid "RESET TWO-FACTOR AUTHENTICATION (APP)" +msgstr "HERSTEL TWEESTAPSAUTHENTICATIE (APP)" + +#: journalist_templates/edit_account.html:87 +msgid "RESET TWO-FACTOR AUTHENTICATION (HARDWARE TOKEN)" +msgstr "HERSTEL TWEESTAPSAUTHENTICATIE (HARDWARE TOKEN)" + +#: journalist_templates/flag.html:5 +msgid "Thanks!" +msgstr "Bedankt!" 
+ +#: journalist_templates/flag.html:8 +msgid "" +"SecureDrop will generate a secure encryption key for this source the next " +"time that they log in. Once the key has been generated, a reply box will " +"appear under their collection of documents. You can use this box to write " +"encrypted replies to them." +msgstr "" +"SecureDrop zal de volgende keer dat de bron inlogt een veilige " +"encryptiesleutel genereren. Zodra de sleutel is gegenereerd zal er een " +"antwoordvakje verschijnen onder hun collectie documenten. U kunt dit vakje " +"gebruiken om versleutelde antwoorden naar hun te versturen." + +#: journalist_templates/flag.html:10 +msgid "Continue to the list of documents for {codename}..." +msgstr "Ga door naar de lijst documenten van {codename}..." + +#: journalist_templates/index.html:4 +msgid "Sources" +msgstr "Bronnen" + +#: journalist_templates/index.html:11 +msgid "Download Unread" +msgstr "Ongelezen downloaden" + +#: journalist_templates/index.html:12 +msgid "Download" +msgstr "Download" + +#: journalist_templates/index.html:13 +msgid "Star" +msgstr "Markeren" + +#: journalist_templates/index.html:14 +msgid "Un-star" +msgstr "De-markeren" + +#: journalist_templates/index.html:42 +msgid "No documents have been submitted!" +msgstr "Geen documenten ingezonden!" + +#: journalist_templates/js-strings.html:3 +msgid "filter by codename" +msgstr "filter op codenaam" + +#: journalist_templates/js-strings.html:4 +msgid "Select All" +msgstr "Selecteer Alles" + +#: journalist_templates/js-strings.html:5 +msgid "Select Unread" +msgstr "Ongelezen selecteren" + +#: journalist_templates/js-strings.html:6 +msgid "Select None" +msgstr "Niets selecteren" + +#: journalist_templates/js-strings.html:7 +msgid "Are you sure you want to delete this collection?" +msgstr "Weet u zeker dat u deze collectie wilt verwijderen?" + +#: journalist_templates/js-strings.html:8 +msgid "Are you sure you want to delete the {size} selected collections?" +msgstr "" +"Weet u zeker dat u de {size} geselecteerde collecties wilt verwijderen?" + +#: journalist_templates/js-strings.html:9 +msgid "Are you sure you want to delete the {size} selected submissions?" +msgstr "" +"Weet u zeker dat u de {size} geselecteerde inzendingen wilt verwijderen?" + +#: journalist_templates/js-strings.html:10 +msgid "Are you sure you want to delete the user {username}?" +msgstr "Weet u zeker dat u de gebruiker {username} wilt verwijderen?" + +#: journalist_templates/js-strings.html:11 +msgid "" +"Are you sure you want to reset two-factor authentication for {username}?" +msgstr "" +"Weet u zeker dat u de tweestapsauthenticatie voor {username} wilt herstellen?" + +#: journalist_templates/login.html:4 +msgid "Login to access the journalist interface" +msgstr "Inloggen om de interface voor journalisten te openen" + +#: journalist_templates/login.html:9 +msgid "Password" +msgstr "Wachtwoord" + +#: journalist_templates/login.html:12 +msgid "LOG IN" +msgstr "INLOGGEN" + +#: source_app/__init__.py:73 +msgid "" +"<strong>WARNING:&nbsp;</strong> You appear to be using Tor2Web. This " +"<strong>&nbsp;does not&nbsp;</strong> provide anonymity. <a href=" +"\"{url}\">Why is this dangerous?</a>" +msgstr "" +"<strong>WAARSCHUWING:&nbsp;</strong> Het lijkt erop dat u Tor2Web gebruikt. " +"Tor2Web zorgt <strong>&nbsp;niet&nbsp;</strong> voor anonimiteit. <a href=\"" +"{url}\">Waarom is dit gevaarlijk?</a>" + +#: source_app/forms.py:15 +msgid "Field must be between 1 and {max_codename_len} characters long." 
+msgstr "Veld moet tussen 1 en {max_codename_len} tekens lang zijn." + +#: source_app/forms.py:18 +msgid "Invalid input." +msgstr "Ongeldige invoer." + +#: source_app/main.py:32 +msgid "" +"You were redirected because you are already logged in. If you want to create " +"a new account, you should log out first." +msgstr "" +"U bent omgeleid omdat u al ingelogd bent. Indien u een nieuw account wilt " +"aanmaken, moet u eerst uitloggen." + +#: source_app/main.py:111 +msgid "You must enter a message or choose a file to submit." +msgstr "U moet een bericht ingeven of een bestand selecteren om te versturen." + +#: source_app/main.py:144 +msgid "Thanks! We received your message." +msgstr "Bedankt! We hebben uw bericht ontvangen." + +#: source_app/main.py:146 +msgid "Thanks! We received your document." +msgstr "Bedankt! We hebben uw document ontvangen." + +#: source_app/main.py:148 +msgid "Thanks! We received your message and document." +msgstr "Bedankt! We hebben uw bericht en document ontvangen." + +#: source_app/main.py:190 +msgid "Reply deleted" +msgstr "Reactie verwijderd" + +#: source_app/main.py:207 +msgid "All replies have been deleted" +msgstr "Alle reacties zijn verwijderd" + +#: source_app/main.py:221 +msgid "Sorry, that is not a recognized codename." +msgstr "Helaas is dit geen erkende codenaam." + +#: source_templates/base.html:6 source_templates/index.html:4 +msgid "Protecting Journalists and Sources" +msgstr "Journalisten en bronnen beschermen" + +#: source_templates/base.html:24 source_templates/base.html:43 +#: source_templates/index.html:32 source_templates/index.html:84 +msgid "Powered by" +msgstr "Mogelijk gemaakt door" + +#: source_templates/base.html:33 +msgid "LOG OUT" +msgstr "UITLOGGEN" + +#: source_templates/base.html:43 source_templates/index.html:84 +msgid "" +"Like all software, SecureDrop may contain security bugs. Use at your own " +"risk." +msgstr "" +"Mogelijk bevat SecureDrop zoals alle software beveiligingsfouten. Gebruik " +"het op eigen risico." + +#: source_templates/error.html:3 +msgid "Server error" +msgstr "Serverfout" + +#: source_templates/error.html:5 +msgid "" +"Sorry, the website encountered an error and was unable to complete your " +"request." +msgstr "Sorry, de website ondervond een fout en kon uw verzoek niet voltooien." + +#: source_templates/error.html:7 source_templates/notfound.html:7 +msgid "Look up a codename..." +msgstr "Zoek naar een codenaam..." + +#: source_templates/first_submission_flashed_message.html:2 +msgid "Success!" +msgstr "Gelukt!" + +#: source_templates/first_submission_flashed_message.html:3 +msgid "" +"Thank you for sending this information to us. Please check back later for " +"replies." +msgstr "" +"Bedankt voor het inzenden van deze informatie. Kom later terug om reacties " +"te bekijken." + +#: source_templates/first_submission_flashed_message.html:5 +msgid "Forgot your codename?" +msgstr "Uw codenaam vergeten?" + +#: source_templates/generate.html:4 +msgid "Welcome" +msgstr "Welkom" + +#: source_templates/generate.html:7 +msgid "" +"Please either write this codename down and keep it in a safe place, or " +"memorize it.<br> This will allow you to log back in and receive replies from " +"journalists." +msgstr "" +"Noteer deze codenaam en bewaar op een veilige plaats of leer hem uit het " +"hoofd.<br>Hiermee kan u later opnieuw inloggen en antwoorden ontvangen van " +"journalisten." 
+ +#: source_templates/generate.html:10 +msgid "" +"This codename is what you will use in future visits to receive messages from " +"our journalists in response to what you submit on the next screen." +msgstr "" +"Dit is de codenaam die u gebruikt voor toekomstige bezoeken om berichten te " +"ontvangen van journalisten in antwoord op wat u op het volgende scherm " +"inzendt." + +#: source_templates/generate.html:34 +msgid "" +"Because we use none of the traditional means to track users of our " +"<strong>SecureDrop</strong>\n" +" service, in future visits, using this codename will be the only way we have " +"to communicate with you should we have\n" +" questions or are interested in additional documents. Unlike passwords, " +"there is no way to retrieve a lost codename." +msgstr "" +"Omdat we geen van de traditionele manieren gebruiken om gebruikers van onze " +"<strong>SecureDrop</strong>-service te traceren, is het gebruik van deze " +"codenaam de enige manier die we hebben om met u te communiceren mochten we " +"vragen hebben of geïnteresseerd zijn in extra documenten. In tegenstelling " +"tot wachtwoorden kunnen verloren codenamen niet teruggehaald worden." + +#: source_templates/generate.html:44 source_templates/index.html:65 +msgid "SUBMIT DOCUMENTS" +msgstr "DOCUMENTEN INZENDEN" + +#: source_templates/index.html:17 +msgid "" +"<strong>We recommend turning the Security Slider to High to protect your " +"anonymity:</strong> <a id=\"disable-js\" href=\"\">Learn how to set it to " +"high</a>, or ignore this warning to continue." +msgstr "" +"<strong>Wij adviseren om de Security Slider op Hoog te zetten om uw " +"anonimiteit te beschermen:</strong> <a id=\"disable-js\" href=\"\">Lees hier " +"hoe u dit op hoog zet</a> of negeer deze waarschuwing." + +#: source_templates/index.html:18 +msgid "" +"<strong>We recommend using Tor Browser to access SecureDrop:</strong> <a id=" +"\"recommend-tor\" href=\"{tor_browser_url}\">Learn how to install it</a>, or " +"ignore this warning to continue." +msgstr "" +"<strong>Wij adviseren om de Tor Browser te gebruiken om SecureDrop te " +"bezoeken:</strong> <a id=\"recommend-tor\" href=\"{tor_browser_url}\">Lees " +"hier informatie om deze te installeren</a> of negeer deze waarschuwing." + +#: source_templates/index.html:41 +msgid "Submit documents for the first time" +msgstr "Voor de eerste keer documenten inzenden" + +#: source_templates/index.html:48 +msgid "Already submitted something?" +msgstr "Al eerder iets ingezonden?" + +#: source_templates/index.html:54 +msgid "" +"If this is your first time submitting documents to journalists, start here." +msgstr "" +"Indien dit uw eerste keer is dat u documenten naar journalisten stuurt, " +"begin dan hier." + +#: source_templates/index.html:58 +msgid "" +"If you have already submitted documents in the past, log in here to check " +"for responses." +msgstr "" +"Indien u al eerder documenten ingezonden heeft, log dan hier in om te kijken " +"of u reacties heeft." + +#: source_templates/index.html:73 +msgid "CHECK FOR A RESPONSE" +msgstr "CHECK OF U ANTWOORD HEEFT" + +#: source_templates/index.html:92 +msgid "" +"You appear to be using the Tor Browser. You can turn the Security Slider to " +"High in 4 easy steps!" +msgstr "" +"Het lijkt erop dat u de Tor Browser gebruikt. U kan de Security Slider op " +"Hoog zetten in 4 simpele stappen!" 
+ +#: source_templates/index.html:94 +msgid "" +"Click the <img src=\"{icon}\" alt=\"Tor icon\"> Tor icon in the toolbar above" +msgstr "" +"Klik op het <img src=\"{icon}\" alt=\"Tor icon\"> Tor icon in de werkbalk " +"hierboven" + +#: source_templates/index.html:95 +msgid "Click <strong>Security Settings...</strong>" +msgstr "Klik op <strong>Veiligheidsinstellingen...</strong>" + +#: source_templates/index.html:96 +msgid "" +"Turn the Slider to <strong>High</strong>, then click <strong>Ok</strong>" +msgstr "" +"Beweeg de Schuif naar <strong>Hoog</strong> en klik vervolgens op " +"<strong>Ok</strong>" + +#: source_templates/index.html:97 +msgid "<a href=\"/\">Click here</a> to refresh the page" +msgstr "<a href=\"/\">Klik hier</a> om de pagina te verversen" + +#: source_templates/login.html:6 +msgid "Enter Codename" +msgstr "Codenaam invoeren" + +#: source_templates/login.html:12 +msgid "Enter your codename" +msgstr "Voer uw codenaam in" + +#: source_templates/login.html:25 source_templates/lookup.html:47 +msgid "CANCEL" +msgstr "ANNULEER" + +#: source_templates/logout_flashed_message.html:4 +#: source_templates/session_timeout.html:5 +msgid "Important!" +msgstr "Belangrijk!" + +#: source_templates/logout_flashed_message.html:5 +msgid "" +"Thank you for exiting your session! Please select \"New Identity\" from the " +"green onion button in the Tor browser to clear all history of your " +"SecureDrop usage from this device." +msgstr "" +"Bedankt voor het uitloggen! Klik nu op \"New Identity\" onder de knop met de " +"groene ui in de Tor Browser om uw gebruikersgeschiedenis van SecureDrop op " +"dit apparaat te verwijderen." + +#: source_templates/lookup.html:11 +msgid "Whew, it’s you! Now, the embarrassing part..." +msgstr "Gelukkig, het ligt niet aan u! En dan nu het gênante deel..." + +#: source_templates/lookup.html:12 +msgid "" +"Our servers experienced an unusual surge of new activity, when you last " +"visited. To err on the side of caution, we put a hold on sending all " +"documents from that day through to our journalists." +msgstr "" +"Onze servers hebben het onverwacht druk gekregen tijdens uw laatste bezoek. " +"Uit voorzorg hebben we alle documenten die u die dag wilde verzenden in de " +"wacht gezet." + +#: source_templates/lookup.html:14 +msgid "" +"Now that we know you’re really a human, though, we’ll get your previous " +"submission into the hands of a journalist straight away. We’re sorry for the " +"delay. Please do check back again in a week or so." +msgstr "" +"Nu we zeker weten dat u echt een mens bent zorgen we ervoor dat de vorige " +"inzendingen direct doorgestuurd worden naar een journalist. We " +"verontschuldigen ons voor de vertraging. Kom nog eens terug over ongeveer " +"een week." + +#: source_templates/lookup.html:20 +msgid "Submit Materials" +msgstr "Materiaal inzenden" + +#: source_templates/lookup.html:21 +msgid "" +"If you are already familiar with GPG, you can optionally encrypt your files " +"and messages with our <a href=\"{url}\" class=\"text-link\">public key</a> " +"before submission. Files are encrypted as they are received by SecureDrop." +msgstr "" +"Indien u reeds bekend bent met GPG kunt u eventueel uw bestanden en " +"berichten versleutelen met onze <a href=\"{url}\" class=\"text-link" +"\">publieke sleutel</a> alvorens deze te versturen. Bestanden worden " +"versleuteld ontvangen door SecureDrop." + +#: source_templates/lookup.html:22 +msgid "<a href=\"{url}\" class=\"text-link\">Learn more</a>." 
+msgstr "<a href=\"{url}\" class=\"text-link\">Lees meer</a>." + +#: source_templates/lookup.html:24 +msgid "You can send a file, a message, or both." +msgstr "U kunt een bestand versturen, een bericht of beide." + +#: source_templates/lookup.html:33 +msgid "Maximum upload size: 500 MB" +msgstr "Maximale bestandsgrootte: 500 MB" + +#: source_templates/lookup.html:36 +msgid "Write a message." +msgstr "Schrijf een bericht." + +#: source_templates/lookup.html:53 +msgid "Read Replies" +msgstr "Reacties lezen" + +#: source_templates/lookup.html:58 +msgid "" +"You have received a reply. To protect your identity in the unlikely event " +"someone learns your codename, please delete all replies when you're done " +"with them. This also lets us know that you are aware of our reply. You can " +"respond by submitting a new message above." +msgstr "" +"U heeft een reactie ontvangen. Om uw identiteit te beschermen voor het " +"onwaarschijnlijke geval dat iemand uw codenaam kent, adviseren wij om alle " +"reacties te verwijderen zodra u er klaar mee bent. Hierdoor weten wij ook " +"dat u onze reactie heeft gezien. U kunt reageren door een nieuw bericht " +"hierboven te versturen." + +#: source_templates/lookup.html:71 +msgid "Delete this reply?" +msgstr "Deze reactie verwijderen?" + +#: source_templates/lookup.html:73 +msgid "DELETE" +msgstr "VERWIJDER" + +#: source_templates/lookup.html:82 +msgid "DELETE ALL REPLIES" +msgstr "VERWIJDER ALLE REACTIES" + +#: source_templates/lookup.html:85 +msgid "Are you finished with the replies?" +msgstr "Heeft u de reacties gelezen?" + +#: source_templates/lookup.html:86 +msgid "YES, DELETE ALL REPLIES" +msgstr "JA, VERWIJDER ALLE REACTIES" + +#: source_templates/lookup.html:87 +msgid "NO, NOT YET" +msgstr "NEE, NOG NIET" + +#: source_templates/lookup.html:91 +msgid "There are no replies at this time." +msgstr "Op dit moment zijn er geen reacties." + +#: source_templates/lookup.html:100 +msgid "Remember, your codename is:" +msgstr "Vergeet niet, uw codenaam is:" + +#: source_templates/lookup.html:101 +msgid "Show" +msgstr "Tonen" + +#: source_templates/lookup.html:103 +msgid "Hide" +msgstr "Verbergen" + +#: source_templates/notfound.html:3 +msgid "Page not found" +msgstr "Pagina niet gevonden" + +#: source_templates/notfound.html:5 +msgid "Sorry, we couldn't locate what you requested." +msgstr "Sorry, we kunnen niet vinden wat u zoekt." + +#: source_templates/session_timeout.html:6 +msgid "" +"Your session timed out due to inactivity. Please login again if you want to " +"continue using SecureDrop, or select \"New Identity\" from the green onion " +"button in the Tor browser to clear all history of your SecureDrop usage from " +"this device. If you are not using Tor Browser, restart your browser." +msgstr "" +"Je sessie is verlopen als gevolg van inactiviteit. Log opnieuw in als je " +"SecureDrop verder wilt gebruiken of selecteer \"New Identity\" onder de " +"groene ui in de Tor Browser om uw gebruikersgeschiedenis van dit apparaat te " +"verwijderen. Als je de Tor Browser niet gebruikt, herstart je browser." + +#: source_templates/tor2web-warning.html:3 +msgid "Why is there a warning about Tor2Web?" +msgstr "Waarom zie ik een waarschuwing over Tor2Web?" + +#: source_templates/tor2web-warning.html:4 +msgid "" +"<a href=\"tor2web.org\">Tor2Web</a> is a proxy service that lets you browse " +"Tor Hidden Services (.onion sites) without installing Tor. It was designed " +"to facilitate anonymous publishing." 
+msgstr "" +"<a href=\"tor2web.org\">Tor2Web</a> is een proxy service waarmee u Tor " +"Hidden Services (.onion sites) kunt gebruiken zonder dat u Tor hoeft te " +"installeren. Het is ontwikkeld om anoniem kunnen te publiceren." + +#: source_templates/tor2web-warning.html:5 +msgid "" +"Tor2Web only protects publishers, not readers. If you upload documents to us " +"using Tor2Web, you are <strong>not anonymous</strong> and could be " +"identified by your ISP or the Tor2Web proxy operators. Additionally, since " +"Tor2Web sites typically do not use HTTPS, it is possible that your " +"connection could be MITM'ed by a capable adversary." +msgstr "" +"Tor2Web beschermt alleen publicisten, niet lezers. U bent <strong>niet " +"anoniem</strong> wanneer u documenten uploadt via Tor2Web, wat inhoudt dat u " +"mogelijk geïdentificeerd kunt worden door uw internetprovider of de " +"beheerders van de Tor2Web proxy. Daarnaast is het mogelijk dat er een man-in-" +"the-middle-aanval uitgevoerd wordt door een kundige tegenpartij omdat " +"Tor2Web doorgaans geen HTTPS-technologie gebruikt." + +#: source_templates/tor2web-warning.html:6 +msgid "" +"If you want to submit information, you are <strong>strongly advised</strong> " +"to install <a href=\"torproject.org/download.html.en\">Tor</a> and use it to " +"access our site safely and anonymously." +msgstr "" +"Indien u informatie wilt inzenden adviseren wij <strong>ten zeerste aan</" +"strong> om <a href=\"torproject.org/download.html.en\">Tor</a> te " +"installeren en te gebruiken zodat u veilig en anoniem onze site kunt " +"bezoeken." + +#: source_templates/use-tor-browser.html:3 +msgid "You Should Use Tor Browser" +msgstr "Gebruik de Tor Browser" + +#: source_templates/use-tor-browser.html:4 +msgid "" +"If you are not using Tor Browser, you <strong>may not be anonymous</strong>." +msgstr "" +"Indien u de Tor Browser niet gebruikt, dan bestaat de kans dat u " +"<strong>niet anoniem</strong> bent." + +#: source_templates/use-tor-browser.html:5 +msgid "" +"If you want to submit information to SecureDrop, we <strong>strongly advise " +"you</strong> to install Tor Browser and use it to access our site safely and " +"anonymously." +msgstr "" +"Indien u informatie wilt inzenden via SecureDrop adviseren wij <strong>ten " +"strengste</strong> om de Tor Browser te installeren en te gebruiken om onze " +"site veilig en anoniem te bezoeken." + +#: source_templates/use-tor-browser.html:6 +msgid "" +"Copy and paste the following address into your browser and follow the " +"instructions to download and install Tor Browser:" +msgstr "" +"Kopieer en plak het volgende adres in uw browser en volg de instructies om " +"de Tor Browser te downloaden en te installeren:" + +#: source_templates/use-tor-browser.html:9 +msgid "" +"If there is a chance that downloading the Tor Browser raises suspicion and " +"your mail provider is less likely to be monitored, you can send a mail to " +"<pre>[email protected]</pre> and a bot will answer with instructions." +msgstr "" +"Wanneer de kans bestaat dat het downloaden van de Tor Browser argwaan wekt " +"en het minder waarschijnlijk is dat uw mail provider wordt gecontroleerd, " +"kan u een e-mail sturen naar <pre>[email protected]</pre> en een bot zal " +"met instructies antwoorden." + +#: source_templates/why-journalist-key.html:3 +msgid "Why download the journalist's public key?" +msgstr "Waarom moet ik de publieke sleutel van de journalist downloaden?" 
+
+#: source_templates/why-journalist-key.html:4
+msgid ""
+"SecureDrop encrypts files and messages after they are submitted. Encrypting "
+"messages and files before submission can provide an extra layer of security "
+"before your data reaches the SecureDrop server."
+msgstr ""
+"SecureDrop versleutelt bestanden en berichten nadat ze verstuurd zijn. "
+"Berichten en bestanden versleutelen vóór het inzenden kan een extra "
+"beveiligingslaag toevoegen alvorens uw gegevens de SecureDrop server "
+"bereiken."
+
+#: source_templates/why-journalist-key.html:5
+msgid ""
+"If you are already familiar with the GPG encryption software, you may wish "
+"to encrypt your submissions yourself. To do so:"
+msgstr ""
+"Indien u al bekend bent met de GPG-versleutelingssoftware, wilt u uw "
+"inzendingen wellicht zelf versleutelen. Om dit te doen:"
+
+#: source_templates/why-journalist-key.html:7
+msgid ""
+"<a href=\"{url}\">Download</a> the public key. The public key is a text file "
+"with the extension <code>.asc</code>"
+msgstr ""
+"<a href=\"{url}\">Download</a> de publieke sleutel. De publieke sleutel is "
+"een tekstbestand met de <code>.asc</code>-bestandsextensie"
+
+#: source_templates/why-journalist-key.html:8
+msgid "Import it into your GPG keyring."
+msgstr "Importeer dit in uw GPG sleutelring."
+
+#: source_templates/why-journalist-key.html:10
+msgid ""
+"If you are using <a href=\"{url}\">Tails</a>, you can double-click the "
+"<code>.asc</code> file you just downloaded and it will be automatically "
+"imported to your keyring."
+msgstr ""
+"Indien u <a href=\"{url}\">Tails</a> gebruikt, kunt u dubbelklikken op het "
+"<code>.asc</code>-bestand dat u zojuist gedownload hebt en het wordt "
+"automatisch toegevoegd aan uw sleutelring."
+
+#: source_templates/why-journalist-key.html:11
+msgid ""
+"If you are using Mac/Linux, open the terminal. You can import the key with "
+"<code>gpg --import /path/to/key.asc</code>."
+msgstr ""
+"Indien u Mac/Linux gebruikt, open dan de terminal. U kunt de sleutel "
+"importeren met <code>gpg --import /pad/naar/key.asc</code>."
+
+#: source_templates/why-journalist-key.html:14
+msgid "Encrypt your submission."
+msgstr "Versleutel uw inzending."
+
+#: source_templates/why-journalist-key.html:16
+msgid ""
+"You will need to be able to identify the key (this is called the \"user ID\" "
+"or UID). Since the public key's filename is the key's fingerprint (with .asc "
+"at the end), you can just copy and paste that. (don't include the <code>."
+"asc</code>!)"
+msgstr ""
+"U moet de sleutel kunnen identificeren (dit heet \"user ID\" of UID). Omdat "
+"de bestandsnaam van de publieke sleutel ook de vingerafdruk van de sleutel "
+"(met .asc aan het einde) is, kunt u deze gewoon kopiëren en plakken (zorg er "
+"wel voor dat u het <code>.asc</code>-gedeelte niet mee kopieert!)"
+
+#: source_templates/why-journalist-key.html:17
+msgid ""
+"On all systems, open the Terminal and use this gpg command: <code>gpg --"
+"recipient &lt;user ID&gt; --encrypt roswell_photos.pdf</code>"
+msgstr ""
+"Open op alle systemen de Terminal en voer de volgende gpg-opdracht uit: "
+"<code>gpg --recipient &lt;user ID&gt; --encrypt roswell_photos.pdf</code>"
+
+#: source_templates/why-journalist-key.html:20
+msgid ""
+"Upload your encrypted submission. It will have the same filename as the "
+"unencrypted file, with .gpg at the end (e.g. <code>roswell_photos.pdf.gpg</"
+"code>)"
+msgstr ""
+"Upload uw versleutelde inzending. 
Deze zal dezelfde bestandsnaam hebben als "
+"het onversleutelde bestand, met .gpg als bestandsextensie (bijv.: "
+"<code>roswell_photos.pdf.gpg</code>)"
+
+#: source_templates/why-journalist-key.html:23
+msgid ""
+"<strong>Tip:</strong> If you wish to remain anonymous, <strong>do not</"
+"strong> use GPG to sign the encrypted file (with the <code>--sign</code> or "
+"<code>-s</code> flag) as this will reveal your GPG identity to us."
+msgstr ""
+"<strong>Tip:</strong> Indien u anoniem wilt blijven, gebruik dan "
+"<strong>niet</strong> GPG om het versleutelde bestand te tekenen (met de "
+"<code>--sign</code> of <code>-s</code> markering) aangezien wij hierdoor uw "
+"GPG-identiteit kunnen zien."
+
+#: source_templates/why-journalist-key.html:25
+msgid "Back to submission page"
+msgstr "Terug naar inzendingenpagina"
+
+#~ msgid "{doc_num} docs"
+#~ msgstr "{doc_num} documenten"
+
+#~ msgid "{msg_num} messages"
+#~ msgstr "{msg_num} berichten"
+
+#~ msgid "Enable Google Authenticator"
+#~ msgstr "Google Authenticator aanzetten"
+
+#~ msgid "Open the Google Authenticator app"
+#~ msgstr "Open de Google Authenticator app"
+
+#~ msgid "Tap menu, then tap \"Set up account\", then tap \"Scan a barcode\""
+#~ msgstr ""
+#~ "Klik op menu, vervolgens op \"Account inrichten\" en klik daarna op "
+#~ "\"Scan barcode\""
+
+#~ msgid "USE NEW CODENAME"
+#~ msgstr "GEBRUIK EEN NIEUWE CODENAAM"
+
+#~ msgid "USE EXISTING CODENAME"
+#~ msgstr "GEBRUIK HUIDIGE CODENAAM"
+
+#~ msgid "messages {msg_num}"
+#~ msgstr "berichten {msg_num}"
diff --git a/securedrop/tests/migrations/__init__.py b/securedrop/tests/migrations/__init__.py
new file mode 100644
diff --git a/securedrop/tests/migrations/helpers.py b/securedrop/tests/migrations/helpers.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/helpers.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+
+from datetime import datetime
+
+
+def random_bool():
+    return bool(random.getrandbits(1))
+
+
+def bool_or_none():
+    return random.choice([None, True, False])
+
+
+def random_bytes(min, max, nullable):
+    if nullable and random_bool():
+        return None
+    else:
+        # python2 just wants strings, fix this in python3
+        return random_chars(random.randint(min, max))
+
+
+def random_name():
+    len = random.randint(1, 100)
+    return random_chars(len)
+
+
+def random_username():
+    len = random.randint(3, 64)
+    return random_chars(len)
+
+
+def random_chars(len, chars=string.printable):
+    return ''.join([random.choice(chars) for _ in range(len)])
+
+
+def random_ascii_chars(len, chars=string.ascii_lowercase):
+    return ''.join([random.choice(chars) for _ in range(len)])
+
+
+def random_datetime(nullable):
+    if nullable and random_bool():
+        return None
+    else:
+        return datetime(
+            year=random.randint(1, 9999),
+            month=random.randint(1, 12),
+            day=random.randint(1, 28),
+            hour=random.randint(0, 23),
+            minute=random.randint(0, 59),
+            second=random.randint(0, 59),
+            microsecond=random.randint(0, 1000),
+        )
diff --git a/securedrop/tests/migrations/migration_15ac9509fc68.py b/securedrop/tests/migrations/migration_15ac9509fc68.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_15ac9509fc68.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+
+from sqlalchemy import text
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_chars, random_username, random_bytes,
+                      random_datetime, bool_or_none)
+
+random.seed('ᕕ( ᐛ )ᕗ')
+
+
+class 
UpgradeTester(): + + '''This migration has no upgrade because there are no tables in the + database prior to running, so there is no data to load or test. + ''' + + def __init__(self, config): + pass + + def load_data(self): + pass + + def check_upgrade(self): + pass + + +class DowngradeTester(): + + JOURNO_NUM = 200 + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + + for _ in range(self.SOURCE_NUM): + self.add_source() + + for jid in range(1, self.JOURNO_NUM, 10): + for _ in range(random.randint(1, 3)): + self.add_journalist_login_attempt(jid) + + for jid in range(1, self.JOURNO_NUM, 10): + for sid in range(1, self.SOURCE_NUM, 10): + self.add_reply(jid, sid) + + for sid in range(1, self.SOURCE_NUM, 10): + self.add_source_star(sid) + + for sid in range(1, self.SOURCE_NUM, 8): + for _ in range(random.randint(1, 3)): + self.add_submission(sid) + + # create "abandoned" submissions (issue #1189) + for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50): + self.add_submission(sid) + + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access); + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, journalist_designation, + flagged, last_updated, pending, interaction_count) + VALUES (:filesystem_id, :journalist_designation, :flagged, + :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_journalist_login_attempt(journalist_id): + params = { + 'timestamp': random_datetime(nullable=True), + 'journalist_id': journalist_id, + } + sql = '''INSERT INTO journalist_login_attempt (timestamp, + journalist_id) + VALUES (:timestamp, :journalist_id) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + 
size) + VALUES (:journalist_id, :source_id, :filename, :size) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_source_star(source_id): + params = { + 'source_id': source_id, + 'starred': bool_or_none(), + } + sql = '''INSERT INTO source_stars (source_id, starred) + VALUES (:source_id, :starred) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_submission(source_id): + params = { + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (source_id, filename, size, + downloaded) + VALUES (:source_id, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''We don't need to check anything on this downgrade because the + migration drops all the tables. Thus, there is nothing to do. + ''' + pass diff --git a/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py b/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- + +import random +import string + +from sqlalchemy import text +from uuid import uuid4 + +from db import db +from journalist_app import create_app +from .helpers import (random_bool, random_chars, random_username, random_bytes, + random_datetime, bool_or_none) + +random.seed('ᕕ( ᐛ )ᕗ') + + +class Helper(): + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'uuid': str(uuid4()), + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (uuid, filesystem_id, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:uuid, :filesystem_id, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_journalist_login_attempt(journalist_id): + params = { + 'timestamp': random_datetime(nullable=True), + 'journalist_id': journalist_id, + } + sql = '''INSERT INTO journalist_login_attempt (timestamp, + journalist_id) + VALUES (:timestamp, :journalist_id) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size) + VALUES (:journalist_id, :source_id, :filename, :size) + ''' + db.engine.execute(text(sql), **params) + + @staticmethod + def extract(app): + with app.app_context(): + sql = '''SELECT j.id, count(distinct a.id), count(distinct r.id) + FROM journalists AS j + LEFT OUTER JOIN journalist_login_attempt AS a + ON a.journalist_id = j.id + LEFT OUTER JOIN replies AS r + ON r.journalist_id = j.id + GROUP BY j.id + ORDER BY j.id + ''' + res = list(db.session.execute(text(sql))) + return res + + +class UpgradeTester(Helper): + + JOURNO_NUM = 100 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + self.initial_data = None + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + 
self.add_journalist() + + self.add_source() + + for jid in range(1, self.JOURNO_NUM): + for _ in range(random.randint(1, 3)): + self.add_journalist_login_attempt(jid) + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + self.initial_data = self.extract(self.app) + + def check_upgrade(self): + extracted = self.extract(self.app) + assert len(extracted) == self.JOURNO_NUM + assert extracted == self.initial_data + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access); + ''' + db.engine.execute(text(sql), **params) + + +class DowngradeTester(Helper): + + JOURNO_NUM = 100 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + self.initial_data = None + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + + self.add_source() + + for jid in range(1, self.JOURNO_NUM): + for _ in range(random.randint(1, 3)): + self.add_journalist_login_attempt(jid) + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + self.initial_data = self.extract(self.app) + + def check_downgrade(self): + extracted = self.extract(self.app) + assert len(extracted) == self.JOURNO_NUM + assert extracted == self.initial_data + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) diff --git 
a/securedrop/tests/migrations/migration_3d91d6948753.py b/securedrop/tests/migrations/migration_3d91d6948753.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_3d91d6948753.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +import random +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_datetime, bool_or_none + +random.seed('ᕕ( ᐛ )ᕗ') + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. + ''' + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + self.add_source() + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, journalist_designation, + flagged, last_updated, pending, interaction_count) + VALUES (:filesystem_id, :journalist_designation, :flagged, + :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + sources = db.engine.execute( + text('SELECT * FROM sources')).fetchall() + assert len(sources) == self.SOURCE_NUM + + for source in sources: + assert source.uuid is not None + + +class DowngradeTester(): + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + self.add_source() + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, uuid, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is now gone, but otherwise the table + has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM sources" + sources = db.engine.execute(text(sql)).fetchall() + + for source in sources: + try: + # This should produce an exception, as the column (should) + # be gone. 
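+                    # (In SQLAlchemy, reading a dropped column from a result
+                    # row raises NoSuchColumnError, so the assert below should
+                    # never actually run.)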
+ assert source['uuid'] is None + except NoSuchColumnError: + pass + + assert len(sources) == self.SOURCE_NUM diff --git a/securedrop/tests/migrations/migration_3da3fcab826a.py b/securedrop/tests/migrations/migration_3da3fcab826a.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_3da3fcab826a.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +import random +import os +from uuid import uuid4 + +from sqlalchemy import text + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_ascii_chars, random_datetime, bool_or_none + + +TEST_DATA_DIR = '/tmp/securedrop/store' + + +def create_file_in_dummy_source_dir(filename): + filesystem_id = 'dummy' + basedir = os.path.join(TEST_DATA_DIR, filesystem_id) + + if not os.path.exists(basedir): + os.makedirs(basedir) + + path_to_file = os.path.join(basedir, filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) + + +class UpgradeTester: + + """This migration verifies that any orphaned submission or reply data from + deleted sources is also deleted. + """ + + def __init__(self, config): + self.config = config + self.app = create_app(config) + self.journalist_id = None + + def load_data(self): + with self.app.app_context(): + self.create_journalist() + self.add_source() + self.valid_source_id = 1 + deleted_source_id = 2 + + # Add submissions and replies with and without a valid source + self.add_submission(self.valid_source_id) + self.add_submission(deleted_source_id) + self.add_submission(deleted_source_id, with_file=False) + self.add_submission(None) # NULL source + + self.add_reply(self.journalist_id, self.valid_source_id) + self.add_reply(self.journalist_id, deleted_source_id) + self.add_reply(self.journalist_id, deleted_source_id, with_file=False) + self.add_reply(self.journalist_id, None) # NULL source + + db.session.commit() + + def create_journalist(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid4()), + 'username': random_chars(50), + 'session_nonce': 0 + } + sql = '''INSERT INTO journalists (uuid, username, session_nonce) + VALUES (:uuid, :username, :session_nonce) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + def add_reply(self, journalist_id, source_id, with_file=True): + filename = '1-' + random_ascii_chars(5) + '-' + random_ascii_chars(5) + '-reply.gpg' + params = { + 'uuid': str(uuid4()), + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': filename, + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, uuid, source_id, filename, + size, deleted_by_source) + VALUES (:journalist_id, :uuid, :source_id, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + if with_file: + create_file_in_dummy_source_dir(filename) + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'uuid': str(uuid4()), + 'filesystem_id': filesystem_id, + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (uuid, filesystem_id, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:uuid, :filesystem_id, :journalist_designation, + :flagged, :last_updated, 
:pending, :interaction_count)
+          '''
+        db.engine.execute(text(sql), **params)
+
+    def add_submission(self, source_id, with_file=True):
+        filename = '1-' + random_ascii_chars(5) + '-' + random_ascii_chars(5) + '-doc.gz.gpg'
+        params = {
+            'uuid': str(uuid4()),
+            'source_id': source_id,
+            'filename': filename,
+            'size': random.randint(0, 1024 * 1024 * 500),
+            'downloaded': bool_or_none(),
+        }
+        sql = '''INSERT INTO submissions (uuid, source_id, filename, size,
+                    downloaded)
+                 VALUES (:uuid, :source_id, :filename, :size, :downloaded)
+              '''
+        db.engine.execute(text(sql), **params)
+
+        if with_file:
+            create_file_in_dummy_source_dir(filename)
+
+    def check_upgrade(self):
+        with self.app.app_context():
+            submissions = db.engine.execute(
+                text('SELECT * FROM submissions')).fetchall()
+
+            # Submissions without a source should be deleted
+            assert len(submissions) == 1
+            for submission in submissions:
+                assert submission.source_id == self.valid_source_id
+
+            replies = db.engine.execute(
+                text('SELECT * FROM replies')).fetchall()
+
+            # Replies without a source should be deleted
+            assert len(replies) == 1
+            for reply in replies:
+                assert reply.source_id == self.valid_source_id
+
+
+class DowngradeTester:
+    # This is a destructive alembic migration; it cannot be downgraded
+
+    def __init__(self, config):
+        self.config = config
+
+    def load_data(self):
+        pass
+
+    def check_downgrade(self):
+        pass
diff --git a/securedrop/tests/migrations/migration_523fff3f969c.py b/securedrop/tests/migrations/migration_523fff3f969c.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_523fff3f969c.py
@@ -0,0 +1,40 @@
+from sqlalchemy import text
+from sqlalchemy.exc import OperationalError
+
+from db import db
+from journalist_app import create_app
+
+
+instance_config_sql = "SELECT * FROM instance_config"
+
+
+class UpgradeTester:
+    def __init__(self, config):
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        pass
+
+    def check_upgrade(self):
+        with self.app.app_context():
+            db.engine.execute(text(instance_config_sql)).fetchall()
+
+
+class DowngradeTester:
+    def __init__(self, config):
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        pass
+
+    def check_downgrade(self):
+        with self.app.app_context():
+            try:
+                db.engine.execute(text(instance_config_sql)).fetchall()
+
+            # The SQLite driver appears to return this rather than the
+            # expected NoSuchTableError.
+            except OperationalError:
+                pass
diff --git a/securedrop/tests/migrations/migration_60f41bb14d98.py b/securedrop/tests/migrations/migration_60f41bb14d98.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_60f41bb14d98.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+import uuid
+
+from sqlalchemy import text
+from sqlalchemy.exc import NoSuchColumnError
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_bytes, random_chars, random_datetime,
+                      random_username, random_name, bool_or_none)
+
+random.seed('ᕕ( ᐛ )ᕗ')
+
+
+class UpgradeTester():
+
+    '''This migration verifies that the session_nonce column now exists, and
+    that the data migration completed successfully.
+ ''' + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'uuid': str(uuid.uuid4()), + 'first_name': random_name(), + 'last_name': random_name(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, uuid, first_name, last_name, + pw_salt, pw_hash, is_admin, otp_secret, is_totp, hotp_counter, + last_token, created_on, last_access, passphrase_hash) + VALUES (:username, :uuid, :first_name, :last_name, :pw_salt, + :pw_hash, :is_admin, :otp_secret, :is_totp, :hotp_counter, + :last_token, :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + journalists = db.engine.execute( + text('SELECT * FROM journalists')).fetchall() + + for journalist in journalists: + assert journalist.session_nonce is not None + + +class DowngradeTester(): + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'uuid': str(uuid.uuid4()), + 'first_name': random_name(), + 'last_name': random_name(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'session_nonce': random.randint(0, 10000), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, uuid, first_name, last_name, + pw_salt, pw_hash, is_admin, session_nonce, otp_secret, is_totp, + hotp_counter, last_token, created_on, last_access, passphrase_hash) + VALUES (:username, :uuid, :first_name, :last_name, :pw_salt, + :pw_hash, :is_admin, :session_nonce, :otp_secret, :is_totp, + :hotp_counter, :last_token, :created_on, :last_access, + :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def 
check_downgrade(self):
+        '''Verify that the session_nonce column is now gone, but otherwise the
+        table has the expected number of rows.
+        '''
+        with self.app.app_context():
+            sql = "SELECT * FROM journalists"
+            journalists = db.engine.execute(text(sql)).fetchall()
+
+            for journalist in journalists:
+                try:
+                    # This should produce an exception, as the column (should)
+                    # be gone.
+                    assert journalist['session_nonce'] is None
+                except NoSuchColumnError:
+                    pass
diff --git a/securedrop/tests/migrations/migration_6db892e17271.py b/securedrop/tests/migrations/migration_6db892e17271.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_6db892e17271.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+import uuid
+
+from sqlalchemy import text
+from sqlalchemy.exc import NoSuchColumnError
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_bytes, random_chars, random_datetime,
+                      random_username, bool_or_none)
+
+random.seed('ᕕ( ᐛ )ᕗ')
+
+
+def add_source():
+    filesystem_id = random_chars(96) if random_bool() else None
+    params = {
+        'uuid': str(uuid.uuid4()),
+        'filesystem_id': filesystem_id,
+        'journalist_designation': random_chars(50),
+        'flagged': bool_or_none(),
+        'last_updated': random_datetime(nullable=True),
+        'pending': bool_or_none(),
+        'interaction_count': random.randint(0, 1000),
+    }
+    sql = '''INSERT INTO sources (uuid, filesystem_id,
+                journalist_designation, flagged, last_updated, pending,
+                interaction_count)
+             VALUES (:uuid, :filesystem_id, :journalist_designation,
+                :flagged, :last_updated, :pending, :interaction_count)
+          '''
+    db.engine.execute(text(sql), **params)
+
+
+def add_journalist():
+    if random_bool():
+        otp_secret = random_chars(16, string.ascii_uppercase + '234567')
+    else:
+        otp_secret = None
+
+    is_totp = random_bool()
+    if is_totp:
+        hotp_counter = 0 if random_bool() else None
+    else:
+        hotp_counter = random.randint(0, 10000) if random_bool() else None
+
+    last_token = random_chars(6, string.digits) if random_bool() else None
+
+    params = {
+        'username': random_username(),
+        'pw_salt': random_bytes(1, 64, nullable=True),
+        'pw_hash': random_bytes(32, 64, nullable=True),
+        'is_admin': bool_or_none(),
+        'otp_secret': otp_secret,
+        'is_totp': is_totp,
+        'hotp_counter': hotp_counter,
+        'last_token': last_token,
+        'created_on': random_datetime(nullable=True),
+        'last_access': random_datetime(nullable=True),
+        'passphrase_hash': random_bytes(32, 64, nullable=True)
+    }
+    sql = '''INSERT INTO journalists (username, pw_salt, pw_hash,
+                is_admin, otp_secret, is_totp, hotp_counter, last_token,
+                created_on, last_access, passphrase_hash)
+             VALUES (:username, :pw_salt, :pw_hash, :is_admin,
+                :otp_secret, :is_totp, :hotp_counter, :last_token,
+                :created_on, :last_access, :passphrase_hash);
+          '''
+    db.engine.execute(text(sql), **params)
+
+
+class UpgradeTester():
+
+    '''This migration verifies that the UUID column now exists on the replies
+    table, and that the data migration completed successfully.
+    '''
+
+    SOURCE_NUM = 200
+    JOURNO_NUM = 20
+
+    def __init__(self, config):
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        with self.app.app_context():
+            for _ in range(self.JOURNO_NUM):
+                add_journalist()
+
+            add_source()
+
+            for jid in range(1, self.JOURNO_NUM):
+                self.add_reply(jid, 1)
+
+            db.session.commit()
+
+    @staticmethod
+    def add_reply(journalist_id, source_id):
+        params = {
+            'journalist_id': journalist_id,
+            'source_id': source_id,
+            'filename': random_chars(50),
+            'size': random.randint(0, 1024 * 1024 * 500),
+            'deleted_by_source': False,
+        }
+        sql = '''INSERT INTO replies (journalist_id, source_id, filename,
+                    size, deleted_by_source)
+                 VALUES (:journalist_id, :source_id, :filename, :size,
+                    :deleted_by_source)
+              '''
+        db.engine.execute(text(sql), **params)
+
+    def check_upgrade(self):
+        with self.app.app_context():
+            replies = db.engine.execute(
+                text('SELECT * FROM replies')).fetchall()
+            assert len(replies) == self.JOURNO_NUM - 1
+
+            for reply in replies:
+                assert reply.uuid is not None
+
+
+class DowngradeTester():
+
+    SOURCE_NUM = 200
+    JOURNO_NUM = 20
+
+    def __init__(self, config):
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        with self.app.app_context():
+            for _ in range(self.JOURNO_NUM):
+                add_journalist()
+
+            add_source()
+
+            for jid in range(1, self.JOURNO_NUM):
+                self.add_reply(jid, 1)
+
+            db.session.commit()
+
+    @staticmethod
+    def add_reply(journalist_id, source_id):
+        params = {
+            'journalist_id': journalist_id,
+            'source_id': source_id,
+            'uuid': str(uuid.uuid4()),
+            'filename': random_chars(50),
+            'size': random.randint(0, 1024 * 1024 * 500),
+            'deleted_by_source': False,
+        }
+        sql = '''INSERT INTO replies (journalist_id, source_id, uuid, filename,
+                    size, deleted_by_source)
+                 VALUES (:journalist_id, :source_id, :uuid, :filename, :size,
+                    :deleted_by_source)
+              '''
+        db.engine.execute(text(sql), **params)
+
+    def check_downgrade(self):
+        '''Verify that the UUID column is now gone, and otherwise the table
+        has the expected number of rows.
+        '''
+        with self.app.app_context():
+            sql = "SELECT * FROM replies"
+            replies = db.engine.execute(text(sql)).fetchall()
+
+            for reply in replies:
+                try:
+                    # This should produce an exception, as the column (should)
+                    # be gone. 
+                    assert reply['uuid'] is None
+                except NoSuchColumnError:
+                    pass
+
+            assert len(replies) == self.JOURNO_NUM - 1
diff --git a/securedrop/tests/migrations/migration_a9fe328b053a.py b/securedrop/tests/migrations/migration_a9fe328b053a.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_a9fe328b053a.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+import random
+import uuid
+
+from sqlalchemy import text
+from sqlalchemy.exc import NoSuchColumnError
+
+from db import db
+from journalist_app import create_app
+from .helpers import random_chars
+
+random.seed('⎦˚◡˚⎣')
+
+
+class Helper:
+
+    def __init__(self):
+        self.journalist_id = None
+
+    def create_journalist(self):
+        if self.journalist_id is not None:
+            raise RuntimeError('Journalist already created')
+
+        params = {
+            'uuid': str(uuid.uuid4()),
+            'username': random_chars(50),
+        }
+        sql = '''INSERT INTO journalists (uuid, username)
+                 VALUES (:uuid, :username)
+              '''
+        self.journalist_id = db.engine.execute(text(sql), **params).lastrowid
+
+    def create_journalist_after_migration(self):
+        if self.journalist_id is not None:
+            raise RuntimeError('Journalist already created')
+
+        params = {
+            'uuid': str(uuid.uuid4()),
+            'username': random_chars(50),
+            'first_name': random_chars(50),
+            'last_name': random_chars(50)
+        }
+        sql = '''
+            INSERT INTO journalists (uuid, username, first_name, last_name)
+            VALUES (:uuid, :username, :first_name, :last_name)
+        '''
+        self.journalist_id = db.engine.execute(text(sql), **params).lastrowid
+
+
+class UpgradeTester(Helper):
+
+    def __init__(self, config):
+        Helper.__init__(self)
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        with self.app.app_context():
+            self.create_journalist()
+
+    def check_upgrade(self):
+        '''
+        - Verify that the Journalist first and last name columns are present
+          after upgrade, and that they are unset (NULL) for existing rows.
+        '''
+        with self.app.app_context():
+            journalists_sql = "SELECT * FROM journalists"
+            journalists = db.engine.execute(text(journalists_sql)).fetchall()
+            for journalist in journalists:
+                assert journalist['first_name'] is None
+                assert journalist['last_name'] is None
+
+
+class DowngradeTester(Helper):
+
+    def __init__(self, config):
+        Helper.__init__(self)
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        with self.app.app_context():
+            self.create_journalist_after_migration()
+
+    def check_downgrade(self):
+        '''
+        - Verify that Journalist first and last names are gone after downgrade.
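+        - Reading those columns from a result row should raise
+          NoSuchColumnError, which the assertions below rely on.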
+ ''' + with self.app.app_context(): + journalists_sql = "SELECT * FROM journalists" + journalists = db.engine.execute(text(journalists_sql)).fetchall() + for journalist in journalists: + try: + assert journalist['first_name'] + except NoSuchColumnError: + pass + try: + assert journalist['last_name'] + except NoSuchColumnError: + pass diff --git a/securedrop/tests/migrations/migration_b58139cfdc8c.py b/securedrop/tests/migrations/migration_b58139cfdc8c.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_b58139cfdc8c.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +import io +import os +import random +import uuid + +from os import path +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_chars, random_datetime + +random.seed('ᕕ( ᐛ )ᕗ') + +DATA = b'wat' +DATA_CHECKSUM = 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + + +class Helper: + + def __init__(self): + self.journalist_id = None + self.source_id = None + self._counter = 0 + + @property + def counter(self): + self._counter += 1 + return self._counter + + def create_journalist(self): + if self.journalist_id is not None: + raise RuntimeError('Journalist already created') + + params = { + 'uuid': str(uuid.uuid4()), + 'username': random_chars(50), + } + sql = '''INSERT INTO journalists (uuid, username) + VALUES (:uuid, :username) + ''' + self.journalist_id = db.engine.execute(text(sql), **params).lastrowid + + def create_source(self): + if self.source_id is not None: + raise RuntimeError('Source already created') + + self.source_filesystem_id = 'aliruhglaiurhgliaurg-{}'.format(self.counter) + params = { + 'filesystem_id': self.source_filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': False, + 'last_updated': random_datetime(nullable=True), + 'pending': False, + 'interaction_count': 0, + } + sql = '''INSERT INTO sources (filesystem_id, uuid, journalist_designation, flagged, + last_updated, pending, interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, :flagged, :last_updated, + :pending, :interaction_count) + ''' + self.source_id = db.engine.execute(text(sql), **params).lastrowid + + def create_submission(self, checksum=False): + filename = str(uuid.uuid4()) + params = { + 'uuid': str(uuid.uuid4()), + 'source_id': self.source_id, + 'filename': filename, + 'size': random.randint(10, 1000), + 'downloaded': False, + + } + + if checksum: + params['checksum'] = \ + 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + sql = '''INSERT INTO submissions (uuid, source_id, filename, size, downloaded, checksum) + VALUES (:uuid, :source_id, :filename, :size, :downloaded, :checksum) + ''' + else: + sql = '''INSERT INTO submissions (uuid, source_id, filename, size, downloaded) + VALUES (:uuid, :source_id, :filename, :size, :downloaded) + ''' + + return (db.engine.execute(text(sql), **params).lastrowid, filename) + + def create_reply(self, checksum=False): + filename = str(uuid.uuid4()) + params = { + 'uuid': str(uuid.uuid4()), + 'source_id': self.source_id, + 'journalist_id': self.journalist_id, + 'filename': filename, + 'size': random.randint(10, 1000), + 'deleted_by_source': False, + } + + if checksum: + params['checksum'] = \ + 'sha256:f00a787f7492a95e165b470702f4fe9373583fbdc025b2c8bdf0262cc48fcff4' + sql = '''INSERT INTO replies (uuid, source_id, journalist_id, filename, size, + 
deleted_by_source, checksum)
+                     VALUES (:uuid, :source_id, :journalist_id, :filename, :size,
+                             :deleted_by_source, :checksum)
+                  '''
+        else:
+            sql = '''INSERT INTO replies (uuid, source_id, journalist_id, filename, size,
+                     deleted_by_source)
+                     VALUES (:uuid, :source_id, :journalist_id, :filename, :size,
+                             :deleted_by_source)
+                  '''
+        return (db.engine.execute(text(sql), **params).lastrowid, filename)
+
+
+class UpgradeTester(Helper):
+
+    def __init__(self, config):
+        Helper.__init__(self)
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        global DATA
+        with self.app.app_context():
+            self.create_journalist()
+            self.create_source()
+
+            submission_id, submission_filename = self.create_submission()
+            reply_id, reply_filename = self.create_reply()
+
+            # we need to actually create files and write data to them so the RQ worker can hash them
+            for fn in [submission_filename, reply_filename]:
+                full_path = self.app.storage.path(self.source_filesystem_id, fn)
+
+                dirname = path.dirname(full_path)
+                if not path.exists(dirname):
+                    os.mkdir(dirname)
+
+                with io.open(full_path, 'wb') as f:
+                    f.write(DATA)
+
+    def check_upgrade(self):
+        '''
+        We cannot inject the `SDConfig` object provided by the fixture `config` into the alembic
+        subprocess that actually performs the migration. The config is needed to get both the value
+        of the DB URL and access to the function `storage.path`. These values are passed to the
+        `rqworker`, and without being able to inject this config, the checksum function won't
+        succeed. The above `load_data` function provides data that can be manually verified by
+        checking the `rqworker` log file in `/tmp/`.
+        The other part of the migration, creating a table, cannot be tested regardless.
+        '''
+        pass
+
+
+class DowngradeTester(Helper):
+
+    def __init__(self, config):
+        Helper.__init__(self)
+        self.config = config
+        self.app = create_app(config)
+
+    def load_data(self):
+        with self.app.app_context():
+            self.create_journalist()
+            self.create_source()
+
+            # create a submission and a reply that we don't add checksums to
+            self.create_submission(checksum=False)
+            self.create_reply(checksum=False)
+
+            # create a submission and a reply that have checksums added
+            self.create_submission(checksum=True)
+            self.create_reply(checksum=True)
+
+            # add a revoked token to exercise the foreign key relationship
+            self.add_revoked_token()
+
+    def check_downgrade(self):
+        '''
+        Verify that the checksum column is now gone.
+        The dropping of the revoked_tokens table cannot be checked. If the migration completes,
+        then it worked correctly. 
+        '''
+        with self.app.app_context():
+            sql = "SELECT * FROM submissions"
+            submissions = db.engine.execute(text(sql)).fetchall()
+            for submission in submissions:
+                try:
+                    # this should produce an exception since the column is gone
+                    submission['checksum']
+                except NoSuchColumnError:
+                    pass
+
+            sql = "SELECT * FROM replies"
+            replies = db.engine.execute(text(sql)).fetchall()
+            for reply in replies:
+                try:
+                    # this should produce an exception since the column is gone
+                    reply['checksum']
+                except NoSuchColumnError:
+                    pass
+
+    def add_revoked_token(self):
+        params = {
+            'journalist_id': self.journalist_id,
+            'token': 'abc123',
+        }
+        sql = '''INSERT INTO revoked_tokens (journalist_id, token)
+                 VALUES (:journalist_id, :token)
+              '''
+        db.engine.execute(text(sql), **params)
diff --git a/securedrop/tests/migrations/migration_e0a525cbab83.py b/securedrop/tests/migrations/migration_e0a525cbab83.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_e0a525cbab83.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+import uuid
+
+from sqlalchemy import text
+from sqlalchemy.exc import NoSuchColumnError
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_bytes, random_chars, random_datetime,
+                      random_username, bool_or_none)
+
+random.seed('ᕕ( ᐛ )ᕗ')
+
+
+def add_source():
+    filesystem_id = random_chars(96) if random_bool() else None
+    params = {
+        'uuid': str(uuid.uuid4()),
+        'filesystem_id': filesystem_id,
+        'journalist_designation': random_chars(50),
+        'flagged': bool_or_none(),
+        'last_updated': random_datetime(nullable=True),
+        'pending': bool_or_none(),
+        'interaction_count': random.randint(0, 1000),
+    }
+    sql = '''INSERT INTO sources (uuid, filesystem_id,
+                journalist_designation, flagged, last_updated, pending,
+                interaction_count)
+             VALUES (:uuid, :filesystem_id, :journalist_designation,
+                :flagged, :last_updated, :pending, :interaction_count)
+          '''
+    db.engine.execute(text(sql), **params)
+
+
+def add_journalist():
+    if random_bool():
+        otp_secret = random_chars(16, string.ascii_uppercase + '234567')
+    else:
+        otp_secret = None
+
+    is_totp = random_bool()
+    if is_totp:
+        hotp_counter = 0 if random_bool() else None
+    else:
+        hotp_counter = random.randint(0, 10000) if random_bool() else None
+
+    last_token = random_chars(6, string.digits) if random_bool() else None
+
+    params = {
+        'username': random_username(),
+        'pw_salt': random_bytes(1, 64, nullable=True),
+        'pw_hash': random_bytes(32, 64, nullable=True),
+        'is_admin': bool_or_none(),
+        'otp_secret': otp_secret,
+        'is_totp': is_totp,
+        'hotp_counter': hotp_counter,
+        'last_token': last_token,
+        'created_on': random_datetime(nullable=True),
+        'last_access': random_datetime(nullable=True),
+        'passphrase_hash': random_bytes(32, 64, nullable=True)
+    }
+    sql = '''INSERT INTO journalists (username, pw_salt, pw_hash,
+                is_admin, otp_secret, is_totp, hotp_counter, last_token,
+                created_on, last_access, passphrase_hash)
+             VALUES (:username, :pw_salt, :pw_hash, :is_admin,
+                :otp_secret, :is_totp, :hotp_counter, :last_token,
+                :created_on, :last_access, :passphrase_hash);
+          '''
+    db.engine.execute(text(sql), **params)
+
+
+class UpgradeTester():
+
+    '''This migration verifies that the deleted_by_source column now exists,
+    and that the data migration completed successfully. 
+ ''' + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size) + VALUES (:journalist_id, :source_id, :filename, :size) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + replies = db.engine.execute( + text('SELECT * FROM replies')).fetchall() + assert len(replies) == self.JOURNO_NUM - 1 + + for reply in replies: + assert reply.deleted_by_source == False # noqa + + +class DowngradeTester(): + + SOURCE_NUM = 200 + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + add_journalist() + + add_source() + + for jid in range(1, self.JOURNO_NUM): + self.add_reply(jid, 1) + + db.session.commit() + + @staticmethod + def add_reply(journalist_id, source_id): + params = { + 'journalist_id': journalist_id, + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'deleted_by_source': False, + } + sql = '''INSERT INTO replies (journalist_id, source_id, filename, + size, deleted_by_source) + VALUES (:journalist_id, :source_id, :filename, :size, + :deleted_by_source) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the deleted_by_source column is now gone, and + otherwise the table has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM replies" + replies = db.engine.execute(text(sql)).fetchall() + + for reply in replies: + try: + # This should produce an exception, as the column (should) + # be gone. + assert reply['deleted_by_source'] is None + except NoSuchColumnError: + pass + + assert len(replies) == self.JOURNO_NUM - 1 diff --git a/securedrop/tests/migrations/migration_f2833ac34bb6.py b/securedrop/tests/migrations/migration_f2833ac34bb6.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_f2833ac34bb6.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- + +import random +import string +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import (random_bool, random_bytes, random_chars, random_datetime, + random_username, bool_or_none) + +random.seed('ᕕ( ᐛ )ᕗ') + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. 
+ ''' + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + journalists = db.engine.execute( + text('SELECT * FROM journalists')).fetchall() + + for journalist in journalists: + assert journalist.uuid is not None + + +class DowngradeTester(): + + JOURNO_NUM = 20 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + for _ in range(self.JOURNO_NUM): + self.add_journalist() + db.session.commit() + + @staticmethod + def add_journalist(): + if random_bool(): + otp_secret = random_chars(16, string.ascii_uppercase + '234567') + else: + otp_secret = None + + is_totp = random_bool() + if is_totp: + hotp_counter = 0 if random_bool() else None + else: + hotp_counter = random.randint(0, 10000) if random_bool() else None + + last_token = random_chars(6, string.digits) if random_bool() else None + + params = { + 'username': random_username(), + 'uuid': str(uuid.uuid4()), + 'pw_salt': random_bytes(1, 64, nullable=True), + 'pw_hash': random_bytes(32, 64, nullable=True), + 'is_admin': bool_or_none(), + 'otp_secret': otp_secret, + 'is_totp': is_totp, + 'hotp_counter': hotp_counter, + 'last_token': last_token, + 'created_on': random_datetime(nullable=True), + 'last_access': random_datetime(nullable=True), + 'passphrase_hash': random_bytes(32, 64, nullable=True) + } + sql = '''INSERT INTO journalists (username, uuid, pw_salt, pw_hash, + is_admin, otp_secret, is_totp, hotp_counter, last_token, + created_on, last_access, passphrase_hash) + VALUES (:username, :uuid, :pw_salt, :pw_hash, :is_admin, + :otp_secret, :is_totp, :hotp_counter, :last_token, + :created_on, :last_access, :passphrase_hash); + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is now gone, but otherwise the table + has the expected number of rows. 
+ ''' + with self.app.app_context(): + sql = "SELECT * FROM journalists" + journalists = db.engine.execute(text(sql)).fetchall() + + for journalist in journalists: + try: + # This should produce an exception, as the column (should) + # be gone. + assert journalist['uuid'] is None + except NoSuchColumnError: + pass diff --git a/securedrop/tests/migrations/migration_faac8092c123.py b/securedrop/tests/migrations/migration_faac8092c123.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_faac8092c123.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- + + +class UpgradeTester(): + '''This migration has no upgrade because it is only the enabling of + pragmas which do not affect database contents. + ''' + + def __init__(self, config): + pass + + def load_data(self): + pass + + def check_upgrade(self): + pass + + +class DowngradeTester(): + '''This migration has no downgrade because it is only the enabling of + pragmas, so we don't need to test the downgrade. + ''' + + def __init__(self, config): + pass + + def load_data(self): + pass + + def check_downgrade(self): + pass diff --git a/securedrop/tests/migrations/migration_fccf57ceef02.py b/securedrop/tests/migrations/migration_fccf57ceef02.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_fccf57ceef02.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- + +import random +import uuid + +from sqlalchemy import text +from sqlalchemy.exc import NoSuchColumnError + +from db import db +from journalist_app import create_app +from .helpers import random_bool, random_chars, random_datetime, bool_or_none + +random.seed('ᕕ( ᐛ )ᕗ') + + +def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + 'filesystem_id': filesystem_id, + 'uuid': str(uuid.uuid4()), + 'journalist_designation': random_chars(50), + 'flagged': bool_or_none(), + 'last_updated': random_datetime(nullable=True), + 'pending': bool_or_none(), + 'interaction_count': random.randint(0, 1000), + } + sql = '''INSERT INTO sources (filesystem_id, uuid, + journalist_designation, flagged, last_updated, pending, + interaction_count) + VALUES (:filesystem_id, :uuid, :journalist_designation, + :flagged, :last_updated, :pending, :interaction_count) + ''' + db.engine.execute(text(sql), **params) + + +class UpgradeTester(): + + '''This migration verifies that the UUID column now exists, and that + the data migration completed successfully. 
+ ''' + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + add_source() + + for sid in range(1, self.SOURCE_NUM, 8): + for _ in range(random.randint(1, 3)): + self.add_submission(sid) + + # create "abandoned" submissions (issue #1189) + for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50): + self.add_submission(sid) + + db.session.commit() + + @staticmethod + def add_submission(source_id): + params = { + 'source_id': source_id, + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (source_id, filename, size, + downloaded) + VALUES (:source_id, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + def check_upgrade(self): + with self.app.app_context(): + submissions = db.engine.execute( + text('SELECT * FROM submissions')).fetchall() + + for submission in submissions: + assert submission.uuid is not None + + +class DowngradeTester(): + + SOURCE_NUM = 200 + + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + + for _ in range(self.SOURCE_NUM): + add_source() + + for sid in range(1, self.SOURCE_NUM, 8): + for _ in range(random.randint(1, 3)): + self.add_submission(sid) + + # create "abandoned" submissions (issue #1189) + for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50): + self.add_submission(sid) + + db.session.commit() + + @staticmethod + def add_submission(source_id): + params = { + 'source_id': source_id, + 'uuid': str(uuid.uuid4()), + 'filename': random_chars(50), + 'size': random.randint(0, 1024 * 1024 * 500), + 'downloaded': bool_or_none(), + } + sql = '''INSERT INTO submissions (source_id, uuid, filename, size, + downloaded) + VALUES (:source_id, :uuid, :filename, :size, :downloaded) + ''' + db.engine.execute(text(sql), **params) + + def check_downgrade(self): + '''Verify that the UUID column is now gone, but otherwise the table + has the expected number of rows. + ''' + with self.app.app_context(): + sql = "SELECT * FROM submissions" + submissions = db.engine.execute(text(sql)).fetchall() + + for submission in submissions: + try: + # This should produce an exception, as the column (should) + # be gone. + assert submission['uuid'] is None + except NoSuchColumnError: + pass diff --git a/securedrop/tests/pages-layout/functional_test.py b/securedrop/tests/pages-layout/functional_test.py deleted file mode 100644 --- a/securedrop/tests/pages-layout/functional_test.py +++ /dev/null @@ -1,77 +0,0 @@ -# -# SecureDrop whistleblower submission system -# Copyright (C) 2017 Loic Dachary <[email protected]> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. 
-# -import os -from os.path import abspath, dirname, realpath -import pytest - -from selenium.webdriver.common.action_chains import ActionChains -from selenium.webdriver.common.keys import Keys - -from selenium import webdriver - -from tests.functional import functional_test - - -def list_locales(): - if 'PAGE_LAYOUT_LOCALES' in os.environ: - locales = os.environ['PAGE_LAYOUT_LOCALES'].split(',') - else: - locales = ['en_US'] - return locales - - -class FunctionalTest(functional_test.FunctionalTest): - - @pytest.fixture(autouse=True, params=list_locales()) - def webdriver_fixture(self, request): - self.accept_languages = request.param - self.log_dir = abspath( - os.path.join(dirname(realpath(__file__)), - 'screenshots', self.accept_languages)) - os.system("mkdir -p " + self.log_dir) - firefox = self._prepare_webdriver() - profile = webdriver.FirefoxProfile() - profile.set_preference("intl.accept_languages", self.accept_languages) - self.override_driver = True - self.driver = self._create_webdriver(firefox, profile) - self._javascript_toggle() - - yield None - - self.driver.quit() - - def _screenshot(self, filename): - self.driver.set_window_size(1024, 500) # Trim size of images for docs - self.driver.save_screenshot(os.path.join(self.log_dir, filename)) - - def _javascript_toggle(self): - # the following is a noop for some reason, workaround it - # profile.set_preference("javascript.enabled", False) - # https://stackoverflow.com/a/36782979/837471 - self.driver.get("about:config") - actions = ActionChains(self.driver) - actions.send_keys(Keys.RETURN) - actions.send_keys("javascript.enabled") - actions.perform() - actions.send_keys(Keys.TAB) - actions.send_keys(Keys.RETURN) - actions.perform() - - def _save_alert(self, filename): - fd = open(os.path.join(self.log_dir, filename), 'wb') - fd.write(self.driver.switch_to.alert.text.encode('utf-8')) diff --git a/securedrop/tests/pages-layout/.gitignore b/securedrop/tests/pageslayout/.gitignore similarity index 100% rename from securedrop/tests/pages-layout/.gitignore rename to securedrop/tests/pageslayout/.gitignore diff --git a/securedrop/tests/pageslayout/__init__.py b/securedrop/tests/pageslayout/__init__.py new file mode 100644 diff --git a/securedrop/tests/pageslayout/functional_test.py b/securedrop/tests/pageslayout/functional_test.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/pageslayout/functional_test.py @@ -0,0 +1,82 @@ +# +# SecureDrop whistleblower submission system +# Copyright (C) 2017 Loic Dachary <[email protected]> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# +import io +import logging +import os +from os.path import abspath +from os.path import dirname +from os.path import realpath + +import pytest + +from tests.functional import functional_test + +from PIL import Image + + +def list_locales(): + if "PAGE_LAYOUT_LOCALES" in os.environ: + locales = os.environ["PAGE_LAYOUT_LOCALES"].split(",") + else: + locales = ["en_US"] + return locales + + +def autocrop_btm(img, bottom_padding=12): + """Automatically crop the bottom of a screenshot.""" + # Get the grayscale of img + gray = img.convert('L') + # We start one row above the bottom since the "modal" windows screenshots + # have a bottom line color different than the background + btm = img.height - 2 + # Get the background luminance value from the bottom-leftmost pixel + bg = gray.getpixel((0, btm)) + + # Move up until the full row is not of the background luminance + while all([gray.getpixel((col, btm)) == bg for col in range(gray.width)]): + btm -= 1 + + btm = min(btm + bottom_padding, img.height) + + return img.crop((0, 0, img.width, btm)) + + +class FunctionalTest(functional_test.FunctionalTest): + default_driver_name = functional_test.FIREFOX + + @pytest.fixture(autouse=True, params=list_locales()) + def set_accept_languages(self, request): + accept_language_list = request.param.replace("_", "-") + logging.debug( + "accept_languages fixture: setting accept_languages to %s", accept_language_list + ) + self.accept_languages = accept_language_list + + def _screenshot(self, filename): + # revert the HTTP Accept-Language format + locale = self.accept_languages.replace("-", "_") + + log_dir = abspath( + os.path.join(dirname(realpath(__file__)), "screenshots", locale) + ) + if not os.path.exists(log_dir): + os.makedirs(log_dir) + + img = Image.open(io.BytesIO(self.driver.get_screenshot_as_png())) + cropped = autocrop_btm(img) + cropped.save(os.path.join(log_dir, filename)) diff --git a/securedrop/tests/pages-layout/test_journalist.py b/securedrop/tests/pageslayout/test_journalist.py similarity index 92% rename from securedrop/tests/pages-layout/test_journalist.py rename to securedrop/tests/pageslayout/test_journalist.py --- a/securedrop/tests/pages-layout/test_journalist.py +++ b/securedrop/tests/pageslayout/test_journalist.py @@ -15,28 +15,18 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
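Since autocrop_btm scans rows upward until it leaves the uniform background, it can be sanity-checked without a browser at all, assuming Pillow is available:

    from PIL import Image

    # 100x100 white canvas whose top 40 rows are black "content"
    img = Image.new('L', (100, 100), 255)
    img.paste(0, (0, 0, 100, 40))

    cropped = autocrop_btm(img)
    # the last content row is index 39, plus the default 12px bottom padding
    assert cropped.height == 51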
# -from tests.functional import journalist_navigation_steps -from tests.functional import source_navigation_steps -import functional_test -import pytest - -import db +import time +import pytest [email protected] -def hardening(request): - hardening = db.LOGIN_HARDENING - - def finalizer(): - db.LOGIN_HARDENING = hardening - request.addfinalizer(finalizer) - db.LOGIN_HARDENING = True - return None +from tests.functional import journalist_navigation_steps +from tests.functional import source_navigation_steps +import tests.pageslayout.functional_test as pft @pytest.mark.pagelayout class TestJournalistLayout( - functional_test.FunctionalTest, + pft.FunctionalTest, source_navigation_steps.SourceNavigationStepsMixin, journalist_navigation_steps.JournalistNavigationStepsMixin): @@ -131,7 +121,7 @@ def test_col_no_documents(self): self._journalist_logs_in() self._journalist_visits_col() self._journalist_delete_all() - self._journalist_confirm_delete_all() + self._journalist_confirm_delete_selected() self._screenshot('journalist-col_no_document.png') def test_col_has_no_key(self): @@ -169,7 +159,6 @@ def test_col(self): self._screenshot('journalist-col.png') def test_col_javascript(self): - self._javascript_toggle() self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() @@ -207,11 +196,11 @@ def test_delete_none(self): self._source_logs_out() self._journalist_logs_in() self._journalist_visits_col() - self._journalist_delete_none() + self._journalist_clicks_delete_selected_link() + self._journalist_confirm_delete_selected() self._screenshot('journalist-delete_none.png') - def test_delete_one_javascript(self): - self._javascript_toggle() + def test_delete_one_confirmation(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() @@ -221,12 +210,11 @@ def test_delete_one_javascript(self): self._journalist_logs_in() self._journalist_visits_col() self._journalist_selects_first_doc() - self._journalist_clicks_delete_selected_javascript() - self._save_alert('journalist-delete_one_javascript.txt') - self._alert_accept() + self._journalist_clicks_delete_selected_link() + time.sleep(1) + self._screenshot('journalist-delete_one_confirmation.png') - def test_delete_all_javascript(self): - self._javascript_toggle() + def test_delete_all_confirmation(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() @@ -235,9 +223,9 @@ def test_delete_all_javascript(self): self._source_logs_out() self._journalist_logs_in() self._journalist_visits_col() - self._journalist_delete_all_javascript() - self._save_alert('journalist-delete_all_javascript.txt') - self._alert_accept() + self._journalist_delete_all_confirmation() + time.sleep(1) + self._screenshot('journalist-delete_all_confirmation.png') def test_delete_one(self): self._source_visits_source_homepage() @@ -249,6 +237,7 @@ def test_delete_one(self): self._journalist_logs_in() self._journalist_visits_col() self._journalist_delete_one() + self._journalist_confirm_delete_selected() self._screenshot('journalist-delete_one.png') def test_delete_all(self): @@ -261,6 +250,7 @@ def test_delete_all(self): self._journalist_logs_in() self._journalist_visits_col() self._journalist_delete_all() + self._journalist_confirm_delete_selected() self._screenshot('journalist-delete_all.png') def test_edit_account_user(self): @@ -311,7 +301,6 @@ def test_index(self): 
self._screenshot('journalist-index.png') def test_index_javascript(self): - self._javascript_toggle() self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() @@ -321,7 +310,7 @@ def test_index_javascript(self): self._journalist_logs_in() self._screenshot('journalist-index_javascript.png') self._journalist_selects_the_first_source() - self._journalist_selects_documents_to_download() + self._journalist_selects_all_documents() self._screenshot( 'journalist-clicks_on_source_and_selects_documents.png' ) diff --git a/securedrop/tests/pages-layout/test_source.py b/securedrop/tests/pageslayout/test_source.py similarity index 90% rename from securedrop/tests/pages-layout/test_source.py rename to securedrop/tests/pageslayout/test_source.py --- a/securedrop/tests/pages-layout/test_source.py +++ b/securedrop/tests/pageslayout/test_source.py @@ -17,7 +17,8 @@ # from tests.functional import journalist_navigation_steps from tests.functional import source_navigation_steps -import functional_test +from tests.functional.functional_test import TORBROWSER +from . import functional_test import pytest @@ -27,15 +28,6 @@ class TestSourceLayout( source_navigation_steps.SourceNavigationStepsMixin, journalist_navigation_steps.JournalistNavigationStepsMixin): - def test_index(self): - self._source_visits_source_homepage() - self._screenshot('source-index.png') - - def test_index_javascript(self): - self._javascript_toggle() - self._source_visits_source_homepage() - self._screenshot('source-index_javascript.png') - def test_lookup(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() @@ -70,14 +62,6 @@ def test_generate(self): self._source_chooses_to_submit_documents() self._screenshot('source-generate.png') - def test_logout_flashed_message(self): - self._source_visits_source_homepage() - self._source_chooses_to_submit_documents() - self._source_continues_to_submit_page() - self._source_submits_a_file() - self._source_logs_out() - self._screenshot('source-logout_flashed_message.png') - def test_submission_entered_text(self): self._source_visits_source_homepage() self._source_chooses_to_submit_documents() @@ -143,17 +127,37 @@ class TestSourceSessionLayout( functional_test.FunctionalTest, source_navigation_steps.SourceNavigationStepsMixin, journalist_navigation_steps.JournalistNavigationStepsMixin): + default_driver_name = TORBROWSER - def setup(self): - self.session_length_minutes = 0.03 - super(TestSourceSessionLayout, self).setup( - session_expiration=self.session_length_minutes) + session_expiration = 5 def test_source_session_timeout(self): + self.disable_js_torbrowser_driver() self._source_visits_source_homepage() self._source_clicks_submit_documents_on_homepage() self._source_continues_to_submit_page() - self._source_waits_for_session_to_timeout(self.session_length_minutes) + self._source_waits_for_session_to_timeout() self._source_enters_text_in_message_field() self._source_visits_source_homepage() self._screenshot('source-session_timeout.png') + + +class TestSourceLayoutTorbrowser( + functional_test.FunctionalTest, + source_navigation_steps.SourceNavigationStepsMixin, + journalist_navigation_steps.JournalistNavigationStepsMixin): + default_driver_name = TORBROWSER + + def test_index(self): + self.disable_js_torbrowser_driver() + self._source_visits_source_homepage() + self._screenshot('source-index.png') + + def test_logout_flashed_message(self): + self.disable_js_torbrowser_driver() + 
self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_submits_a_file() + self._source_logs_out() + self._screenshot('source-logout_flashed_message.png') diff --git a/securedrop/tests/pytest.ini b/securedrop/tests/pytest.ini --- a/securedrop/tests/pytest.ini +++ b/securedrop/tests/pytest.ini @@ -1,4 +1,5 @@ [pytest] +log_format = %(created)f %(asctime)s %(lineno)4d:%(filename)-25s %(levelname)s %(message)s testpaths = . functional -usefixtures = setUptearDown +usefixtures = setUpTearDown addopts = --cov=../securedrop/ diff --git a/securedrop/tests/test_2fa.py b/securedrop/tests/test_2fa.py --- a/securedrop/tests/test_2fa.py +++ b/securedrop/tests/test_2fa.py @@ -1,121 +1,162 @@ # -*- coding: utf-8 -*- import os +import pytest +import time +from contextlib import contextmanager +from datetime import datetime, timedelta from flask import url_for -import flask_testing +from pyotp import TOTP os.environ['SECUREDROP_ENV'] = 'test' # noqa -from db import Journalist, BadTokenException -import journalist -import utils - - -class TestJournalist2FA(flask_testing.TestCase): - def create_app(self): - return journalist.app - - def setUp(self): - utils.env.setup() - self.admin, self.admin_pw = utils.db_helper.init_journalist( - is_admin=True) - self.user, self.user_pw = utils.db_helper.init_journalist() - - def tearDown(self): - utils.env.teardown() - - def _login_admin(self, token=None): - """Login to the Journalist Interface as an admin user with the - Werkzeug client. - - Args: - token (str): The TOTP token to attempt login with. Defaults - to the correct token for the current time window. - """ - if token is None: - token = self.admin.totp.now() - self.client.post(url_for('main.login'), - data=dict(username=self.admin.username, - password=self.admin_pw, - token=token)) - - def _login_user(self, token=None): - """Analagous to `_login_admin()` except for a non-admin user. - """ - if token is None: - token = self.user.totp.now() - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token=token)) - return resp - - def test_totp_reuse_protections(self): - """Ensure that logging in twice with the same TOTP token - fails. - """ - token = self.user.totp.now() - resp = self._login_user(token) - self.assertRedirects(resp, url_for('main.index')) - - resp = self._login_user(token) - self.assert200(resp) - self.assertIn("Login failed", resp.data) - - def test_totp_reuse_protections2(self): - """More granular than the preceeding test, we want to make sure - the right exception is being raised in the right place. - """ - valid_token = self.user.totp.now() - Journalist.login(self.user.username, self.user_pw, valid_token) - with self.assertRaises(BadTokenException): - Journalist.login(self.user.username, self.user_pw, valid_token) - - def test_bad_token_fails_to_verify_on_admin_new_user_two_factor_page(self): - # Regression test - # https://github.com/freedomofpress/securedrop/pull/1692 - self._login_admin() - - # Create and submit an invalid 2FA token - invalid_token = u'000000' - resp = self.client.post(url_for('admin.new_user_two_factor', - uid=self.admin.id), +from models import Journalist, BadTokenException +from .utils import login_user +from .utils.instrument import InstrumentedApp + + +@contextmanager +def totp_window(): + '''To ensure we have enough time during a single TOTP window to do the + whole test, optionally sleep. 
+ '''
+
+ now = datetime.now()
+ # `or 30` to ensure we don't have a zero-length window
+ seconds_left_in_window = ((30 - now.second) % 30) or 30
+
+ window_end = now.replace(microsecond=0) + \
+ timedelta(seconds=seconds_left_in_window)
+ window_end_delta = window_end - now
+
+ # if we have less than 5 seconds left in this window, sleep to wait for
+ # the next window
+ if window_end_delta < timedelta(seconds=5):
+ timeout = window_end_delta.seconds + \
+ window_end_delta.microseconds / 1000000.0
+ time.sleep(timeout)
+ window_end = window_end + timedelta(seconds=30)
+
+ yield
+
+ # This check ensures that the token was used during the same window
+ # in the event that the app's logic only checks for token reuse if the
+ # token was valid.
+ now = datetime.now()
+ assert now < window_end
+
+
+def test_totp_reuse_protections(journalist_app, test_journo, hardening):
+ """Ensure that logging in twice with the same TOTP token fails."""
+ with totp_window():
+ token = TOTP(test_journo['otp_secret']).now()
+
+ with journalist_app.test_client() as app:
+ login_user(app, test_journo)
+ resp = app.get(url_for('main.logout'), follow_redirects=True)
+ assert resp.status_code == 200
+
+ with journalist_app.test_client() as app:
+ resp = app.post(url_for('main.login'),
+ data=dict(username=test_journo['username'],
+ password=test_journo['password'],
+ token=token))
+ assert resp.status_code == 200
+ text = resp.data.decode('utf-8')
+ assert "Login failed" in text
+
+
+def test_totp_reuse_protections2(journalist_app, test_journo, hardening):
+ """More granular than the preceding test, we want to make sure the right
+ exception is being raised in the right place.
+ """
+
+ with totp_window():
+ token = TOTP(test_journo['otp_secret']).now()
+
+ with journalist_app.app_context():
+ Journalist.login(test_journo['username'],
+ test_journo['password'],
+ token)
+ with pytest.raises(BadTokenException):
+ Journalist.login(test_journo['username'],
+ test_journo['password'],
+ token)
+
+
+def test_bad_token_fails_to_verify_on_admin_new_user_two_factor_page(
+ journalist_app, test_admin, hardening):
+ '''Regression test for
+ https://github.com/freedomofpress/securedrop/pull/1692
+ '''
+
+ invalid_token = '000000'
+
+ with totp_window():
+ with journalist_app.test_client() as app:
+ login_user(app, test_admin)
+ # Submit the token once
+ with InstrumentedApp(journalist_app) as ins:
+ resp = app.post(url_for('admin.new_user_two_factor',
+ uid=test_admin['id']),
 data=dict(token=invalid_token))
- self.assert200(resp)
- self.assertMessageFlashed(
- 'Could not verify token in two-factor authentication.', 'error')
- # last_token should be set to the invalid token we just tried to use
- self.assertEqual(self.admin.last_token, invalid_token)
-
- # Submit the same invalid token again
- resp = self.client.post(url_for('admin.new_user_two_factor',
- uid=self.admin.id),
+ assert resp.status_code == 200
+ ins.assert_message_flashed(
+ 'There was a problem verifying the two-factor code. 
Please try again.', + 'error') + + # last_token should be set to the token we just tried to use + with journalist_app.app_context(): + admin = Journalist.query.get(test_admin['id']) + assert admin.last_token == invalid_token + + with journalist_app.test_client() as app: + login_user(app, test_admin) + # Submit the same invalid token again + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('admin.new_user_two_factor', + uid=test_admin['id']), + data=dict(token=invalid_token)) + ins.assert_message_flashed( + 'There was a problem verifying the two-factor code. Please try again.', + 'error' + ) + + +def test_bad_token_fails_to_verify_on_new_user_two_factor_page( + journalist_app, test_journo, hardening): + '''Regression test for + https://github.com/freedomofpress/securedrop/pull/1692 + ''' + invalid_token = '000000' + + with totp_window(): + with journalist_app.test_client() as app: + login_user(app, test_journo) + # Submit the token once + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('account.new_two_factor'), data=dict(token=invalid_token)) - # A flashed message should appear - self.assertMessageFlashed( - 'Could not verify token in two-factor authentication.', 'error') + assert resp.status_code == 200 + ins.assert_message_flashed( + 'There was a problem verifying the two-factor code. Please try again.', + 'error' + ) - def test_bad_token_fails_to_verify_on_new_user_two_factor_page(self): - # Regression test - # https://github.com/freedomofpress/securedrop/pull/1692 - self._login_user() + # last_token should be set to the token we just tried to use + with journalist_app.app_context(): + journo = Journalist.query.get(test_journo['id']) + assert journo.last_token == invalid_token - # Create and submit an invalid 2FA token - invalid_token = u'000000' - resp = self.client.post(url_for('account.new_two_factor'), - data=dict(token=invalid_token)) + with journalist_app.test_client() as app: + login_user(app, test_journo) - self.assert200(resp) - self.assertMessageFlashed( - 'Could not verify token in two-factor authentication.', 'error') - # last_token should be set to the invalid token we just tried to use - self.assertEqual(self.user.last_token, invalid_token) - - # Submit the same invalid token again - resp = self.client.post(url_for('account.new_two_factor'), + # Submit the same invalid token again + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('account.new_two_factor'), data=dict(token=invalid_token)) - - # A flashed message should appear - self.assertMessageFlashed( - 'Could not verify token in two-factor authentication.', 'error') + ins.assert_message_flashed( + 'There was a problem verifying the two-factor code. 
Please try again.',
+ 'error'
+ )
 diff --git a/securedrop/tests/test_alembic.py b/securedrop/tests/test_alembic.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_alembic.py
@@ -0,0 +1,219 @@
+# -*- coding: utf-8 -*-
+
+import os
+import pytest
+import re
+import subprocess
+
+from alembic.config import Config as AlembicConfig
+from alembic.script import ScriptDirectory
+from os import path
+from sqlalchemy import text
+
+from db import db
+from journalist_app import create_app
+
+MIGRATION_PATH = path.join(path.dirname(__file__), '..', 'alembic', 'versions')
+
+ALL_MIGRATIONS = [x.split('.')[0].split('_')[0]
+ for x in os.listdir(MIGRATION_PATH)
+ if x.endswith('.py')]
+
+WHITESPACE_REGEX = re.compile(r'\s+')
+
+
+def list_migrations(cfg_path, head):
+ cfg = AlembicConfig(cfg_path)
+ script = ScriptDirectory.from_config(cfg)
+ migrations = [x.revision
+ for x in script.walk_revisions(base='base', head=head)]
+ migrations.reverse()
+ return migrations
+
+
+def upgrade(alembic_config, migration):
+ subprocess.check_call(['alembic', 'upgrade', migration],
+ cwd=path.dirname(alembic_config))
+
+
+def downgrade(alembic_config, migration):
+ subprocess.check_call(['alembic', 'downgrade', migration],
+ cwd=path.dirname(alembic_config))
+
+
+def get_schema(app):
+ with app.app_context():
+ result = list(db.engine.execute(text('''
+ SELECT type, name, tbl_name, sql
+ FROM sqlite_master
+ ORDER BY type, name, tbl_name
+ ''')))
+
+ return {(x[0], x[1], x[2]): x[3] for x in result}
+
+
+def assert_schemas_equal(left, right):
+ for (k, v) in list(left.items()):
+ if k not in right:
+ raise AssertionError(
+ 'Left contained {} but right did not'.format(k))
+ if not ddl_equal(v, right[k]):
+ raise AssertionError(
+ 'Schema for {} did not match:\nLeft:\n{}\nRight:\n{}'
+ .format(k, v, right[k]))
+ right.pop(k)
+
+ if right:
+ raise AssertionError(
+ 'Right had additional tables: {}'.format(list(right.keys())))
+
+
+def ddl_equal(left, right):
+ '''Check that the "tokenized" DDL is equivalent, because sometimes
+ Alembic appends several columns on one line, so its DDL comes out
+ like:
+
+ column1 TEXT NOT NULL, column2 TEXT NOT NULL
+
+ while SQLAlchemy's DDL comes out:
+
+ column1 TEXT NOT NULL,
+ column2 TEXT NOT NULL
+ '''
+ # ignore the autoindex cases
+ if left is None and right is None:
+ return True
+
+ left = [x for x in WHITESPACE_REGEX.split(left) if x]
+ right = [x for x in WHITESPACE_REGEX.split(right) if x]
+
+ # Strip commas and quotes
+ left = [x.replace("\"", "").replace(",", "") for x in left]
+ right = [x.replace("\"", "").replace(",", "") for x in right]
+
+ return sorted(left) == sorted(right)
+
+
+def test_alembic_head_matches_db_models(journalist_app,
+ alembic_config,
+ config):
+ '''This test is to make sure that our database models in `models.py` are
+ always in sync with the schema generated by `alembic upgrade head`.
+ '''
+ models_schema = get_schema(journalist_app)
+
+ os.remove(config.DATABASE_FILE)
+
+ # Create database file
+ subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases'])
+ upgrade(alembic_config, 'head')
+
+ # Recreate the app to get a new SQLALCHEMY_DATABASE_URI
+ app = create_app(config)
+ alembic_schema = get_schema(app)
+
+ # The initial migration creates the table 'alembic_version', but this is
+ # not present in the schema created by `db.create_all()`.
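The two layouts described in the ddl_equal docstring make a quick self-check, since tokenizing on whitespace and stripping quotes and commas leaves identical sorted token lists:

    left = '"column1" TEXT NOT NULL, column2 TEXT NOT NULL'
    right = '''column1 TEXT NOT NULL,
    column2 TEXT NOT NULL'''

    assert ddl_equal(left, right)
    assert not ddl_equal(left, 'column1 TEXT NOT NULL')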
+ alembic_schema = {k: v for k, v in list(alembic_schema.items()) + if k[2] != 'alembic_version'} + + assert_schemas_equal(alembic_schema, models_schema) + + [email protected]('migration', ALL_MIGRATIONS) +def test_alembic_migration_upgrade(alembic_config, config, migration): + # run migrations in sequence from base -> head + for mig in list_migrations(alembic_config, migration): + upgrade(alembic_config, mig) + + [email protected]('migration', ALL_MIGRATIONS) +def test_alembic_migration_downgrade(alembic_config, config, migration): + # upgrade to the parameterized test case ("head") + upgrade(alembic_config, migration) + + # run migrations in sequence from "head" -> base + migrations = list_migrations(alembic_config, migration) + migrations.reverse() + + for mig in migrations: + downgrade(alembic_config, mig) + + [email protected]('migration', ALL_MIGRATIONS) +def test_schema_unchanged_after_up_then_downgrade(alembic_config, + config, + migration): + # Create the app here. Using a fixture will init the database. + app = create_app(config) + + migrations = list_migrations(alembic_config, migration) + + if len(migrations) > 1: + target = migrations[-2] + upgrade(alembic_config, target) + else: + # The first migration is the degenerate case where we don't need to + # get the database to some base state. + pass + + original_schema = get_schema(app) + + upgrade(alembic_config, '+1') + downgrade(alembic_config, '-1') + + reverted_schema = get_schema(app) + + # The initial migration is a degenerate case because it creates the table + # 'alembic_version', but rolling back the migration doesn't clear it. + if len(migrations) == 1: + reverted_schema = {k: v for k, v in list(reverted_schema.items()) + if k[2] != 'alembic_version'} + + assert_schemas_equal(reverted_schema, original_schema) + + [email protected]('migration', ALL_MIGRATIONS) +def test_upgrade_with_data(alembic_config, config, migration): + migrations = list_migrations(alembic_config, migration) + if len(migrations) == 1: + # Degenerate case where there is no data for the first migration + return + + # Upgrade to one migration before the target stored in `migration` + last_migration = migrations[-2] + upgrade(alembic_config, last_migration) + + # Dynamic module import + mod_name = 'tests.migrations.migration_{}'.format(migration) + mod = __import__(mod_name, fromlist=['UpgradeTester']) + + # Load the test data + upgrade_tester = mod.UpgradeTester(config=config) + upgrade_tester.load_data() + + # Upgrade to the target + upgrade(alembic_config, migration) + + # Make sure it applied "cleanly" for some definition of clean + upgrade_tester.check_upgrade() + + [email protected]('migration', ALL_MIGRATIONS) +def test_downgrade_with_data(alembic_config, config, migration): + # Upgrade to the target + upgrade(alembic_config, migration) + + # Dynamic module import + mod_name = 'tests.migrations.migration_{}'.format(migration) + mod = __import__(mod_name, fromlist=['DowngradeTester']) + + # Load the test data + downgrade_tester = mod.DowngradeTester(config=config) + downgrade_tester.load_data() + + # Downgrade to previous migration + downgrade(alembic_config, '-1') + + # Make sure it applied "cleanly" for some definition of clean + downgrade_tester.check_downgrade() diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -1,200 +1,367 @@ # -*- coding: utf-8 -*- +from datetime import datetime +from hypothesis import given +from 
hypothesis.strategies import text +import io import os -import unittest +import pytest +import re os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config import crypto_util -import db -import store -import utils +import models +from crypto_util import CryptoUtil, CryptoException, FIFOCache +from db import db -class TestCryptoUtil(unittest.TestCase): - """The set of tests for crypto_util.py.""" +def test_word_list_does_not_contain_empty_strings(journalist_app): + assert '' not in journalist_app.crypto_util.get_wordlist('en') + assert '' not in journalist_app.crypto_util.nouns + assert '' not in journalist_app.crypto_util.adjectives - def setUp(self): - utils.env.setup() - def tearDown(self): - utils.env.teardown() +def test_clean(): + ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ' + 'KLMNOPQRSTUVWXYZ') + invalids = ['foo bar`', 'bar baz~'] - def test_word_list_does_not_contain_empty_strings(self): - self.assertNotIn('', (crypto_util._get_wordlist('en') - + crypto_util.nouns - + crypto_util.adjectives)) + assert crypto_util.clean(ok) == ok - def test_clean(self): - ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ' - 'KLMNOPQRSTUVWXYZ') - invalid_1 = 'foo bar`' - invalid_2 = 'bar baz~' + for invalid in invalids: + with pytest.raises(CryptoException) as err: + crypto_util.clean(invalid) + assert 'invalid input: {}'.format(invalid) in str(err) - self.assertEqual(ok, crypto_util.clean(ok)) - with self.assertRaisesRegexp(crypto_util.CryptoException, - 'invalid input: {}'.format(invalid_1)): - crypto_util.clean(invalid_1) - with self.assertRaisesRegexp(crypto_util.CryptoException, - 'invalid input: {}'.format(invalid_2)): - crypto_util.clean(invalid_2) - def test_encrypt_success(self): - source, _ = utils.db_helper.init_source() - message = str(os.urandom(1)) - ciphertext = crypto_util.encrypt( +def test_encrypt_success(source_app, config, test_source): + message = 'test' + + with source_app.app_context(): + ciphertext = source_app.crypto_util.encrypt( message, - [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY], - store.path(source.filesystem_id, 'somefile.gpg')) - - self.assertIsInstance(ciphertext, str) - self.assertNotEqual(ciphertext, message) - self.assertGreater(len(ciphertext), 0) - - def test_encrypt_failure(self): - source, _ = utils.db_helper.init_source() - with self.assertRaisesRegexp(crypto_util.CryptoException, - 'no terminal at all requested'): - crypto_util.encrypt( + [source_app.crypto_util.getkey(test_source['filesystem_id']), + config.JOURNALIST_KEY], + source_app.storage.path(test_source['filesystem_id'], + 'somefile.gpg')) + + assert isinstance(ciphertext, bytes) + assert ciphertext.decode('utf-8') != message + assert len(ciphertext) > 0 + + +def test_encrypt_failure(source_app, test_source): + with source_app.app_context(): + with pytest.raises(CryptoException) as err: + source_app.crypto_util.encrypt( str(os.urandom(1)), [], - store.path(source.filesystem_id, 'other.gpg')) - - def test_encrypt_without_output(self): - """We simply do not specify the option output keyword argument - to crypto_util.encrypt() here in order to confirm encryption - works when it defaults to `None`. 
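The behaviour test_clean pins down amounts to a whitelist check; presumably something close to this sketch, reusing the DICEWARE_SAFE_CHARS name imported further down (the body is assumed rather than copied from crypto_util):

    from crypto_util import CryptoException, DICEWARE_SAFE_CHARS

    def clean(s):
        '''Raise CryptoException if s contains a non-whitelisted character.'''
        if any(c not in DICEWARE_SAFE_CHARS for c in s):
            raise CryptoException('invalid input: {0}'.format(s))
        return s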
- """ - source, codename = utils.db_helper.init_source() - message = str(os.urandom(1)) - ciphertext = crypto_util.encrypt( + source_app.storage.path(test_source['filesystem_id'], + 'other.gpg')) + assert 'no terminal at all requested' in str(err) + + +def test_encrypt_without_output(source_app, config, test_source): + """We simply do not specify the option output keyword argument + to crypto_util.encrypt() here in order to confirm encryption + works when it defaults to `None`. + """ + message = 'test' + with source_app.app_context(): + ciphertext = source_app.crypto_util.encrypt( message, - [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY]) - plaintext = crypto_util.decrypt(codename, ciphertext) - - self.assertEqual(message, plaintext) - - def test_encrypt_binary_stream(self): - """Generally, we pass unicode strings (the type form data is - returned as) as plaintext to crypto_util.encrypt(). These have - to be converted to "binary stream" types (such as `file`) before - we can actually call gnupg.GPG.encrypt() on them. This is done - in crypto_util.encrypt() with an `if` branch that uses - `gnupg._util._is_stream(plaintext)` as the predicate, and calls - `gnupg._util._make_binary_stream(plaintext)` if necessary. This - test ensures our encrypt function works even if we provide - inputs such that this `if` branch is skipped (i.e., the object - passed for `plaintext` is one such that - `gnupg._util._is_stream(plaintext)` returns `True`). - """ - source, codename = utils.db_helper.init_source() - with open(os.path.realpath(__file__)) as fh: - ciphertext = crypto_util.encrypt( + [source_app.crypto_util.getkey(test_source['filesystem_id']), + config.JOURNALIST_KEY]) + plaintext = source_app.crypto_util.decrypt( + test_source['codename'], + ciphertext) + + assert plaintext == message + + +def test_encrypt_binary_stream(source_app, config, test_source): + """Generally, we pass unicode strings (the type form data is + returned as) as plaintext to crypto_util.encrypt(). These have + to be converted to "binary stream" types (such as `file`) before + we can actually call gnupg.GPG.encrypt() on them. This is done + in crypto_util.encrypt() with an `if` branch that uses + `gnupg._util._is_stream(plaintext)` as the predicate, and calls + `gnupg._util._make_binary_stream(plaintext)` if necessary. This + test ensures our encrypt function works even if we provide + inputs such that this `if` branch is skipped (i.e., the object + passed for `plaintext` is one such that + `gnupg._util._is_stream(plaintext)` returns `True`). 
+ """ + with source_app.app_context(): + with io.open(os.path.realpath(__file__)) as fh: + ciphertext = source_app.crypto_util.encrypt( fh, - [crypto_util.getkey(source.filesystem_id), + [source_app.crypto_util.getkey(test_source['filesystem_id']), config.JOURNALIST_KEY], - store.path(source.filesystem_id, 'somefile.gpg')) - plaintext = crypto_util.decrypt(codename, ciphertext) - - with open(os.path.realpath(__file__)) as fh: - self.assertEqual(fh.read(), plaintext) - - def test_encrypt_fingerprints_not_a_list_or_tuple(self): - """If passed a single fingerprint as a string, encrypt should - correctly place that string in a list, and encryption/ - decryption should work as intended.""" - source, codename = utils.db_helper.init_source() - message = str(os.urandom(1)) - ciphertext = crypto_util.encrypt( + source_app.storage.path(test_source['filesystem_id'], + 'somefile.gpg')) + plaintext = source_app.crypto_util.decrypt(test_source['codename'], + ciphertext) + + with io.open(os.path.realpath(__file__)) as fh: + assert fh.read() == plaintext + + +def test_encrypt_fingerprints_not_a_list_or_tuple(source_app, test_source): + """If passed a single fingerprint as a string, encrypt should + correctly place that string in a list, and encryption/ + decryption should work as intended.""" + message = 'test' + + with source_app.app_context(): + ciphertext = source_app.crypto_util.encrypt( message, - crypto_util.getkey(source.filesystem_id), - store.path(source.filesystem_id, 'somefile.gpg')) - plaintext = crypto_util.decrypt(codename, ciphertext) + source_app.crypto_util.getkey(test_source['filesystem_id']), + source_app.storage.path(test_source['filesystem_id'], + 'somefile.gpg')) + plaintext = source_app.crypto_util.decrypt(test_source['codename'], + ciphertext) + + assert plaintext == message + - self.assertEqual(message, plaintext) +def test_basic_encrypt_then_decrypt_multiple_recipients(source_app, + config, + test_source): + message = 'test' - def test_basic_encrypt_then_decrypt_multiple_recipients(self): - source, codename = utils.db_helper.init_source() - message = str(os.urandom(1)) - ciphertext = crypto_util.encrypt( + with source_app.app_context(): + ciphertext = source_app.crypto_util.encrypt( message, - [crypto_util.getkey(source.filesystem_id), + [source_app.crypto_util.getkey(test_source['filesystem_id']), config.JOURNALIST_KEY], - store.path(source.filesystem_id, 'somefile.gpg')) - plaintext = crypto_util.decrypt(codename, ciphertext) + source_app.storage.path(test_source['filesystem_id'], + 'somefile.gpg')) + plaintext = source_app.crypto_util.decrypt(test_source['codename'], + ciphertext) - self.assertEqual(message, plaintext) + assert plaintext == message # Since there's no way to specify which key to use for # decryption to python-gnupg, we delete the `source`'s key and # ensure we can decrypt with the `config.JOURNALIST_KEY`. 
- crypto_util.delete_reply_keypair(source.filesystem_id) - plaintext_ = crypto_util.gpg.decrypt(ciphertext).data - - self.assertEqual(message, plaintext_) - - def verify_genrandomid(self, locale): - id = crypto_util.genrandomid(locale=locale) - id_words = id.split() - - self.assertEqual(id, crypto_util.clean(id)) - self.assertEqual(len(id_words), crypto_util.DEFAULT_WORDS_IN_RANDOM_ID) - for word in id_words: - self.assertIn(word, crypto_util._get_wordlist(locale)) - - def test_genrandomid_default_locale_is_en(self): - self.verify_genrandomid('en') - - def test_get_wordlist(self): - locales = [] - wordlists_path = os.path.join(config.SECUREDROP_ROOT, 'wordlists') - for f in os.listdir(wordlists_path): - if f.endswith('.txt') and f != 'en.txt': - locales.append(f.split('.')[0]) - wordlist_en = crypto_util._get_wordlist('en') - for locale in locales: - self.assertNotEqual(wordlist_en, crypto_util._get_wordlist(locale)) - self.verify_genrandomid(locale) - self.assertEqual(wordlist_en, crypto_util._get_wordlist('unknown')) + source_app.crypto_util.delete_reply_keypair( + test_source['filesystem_id']) + plaintext = source_app.crypto_util.gpg.decrypt(ciphertext).data.decode('utf-8') + + assert plaintext == message + + +def verify_genrandomid(app, locale): + id = app.crypto_util.genrandomid(locale=locale) + id_words = id.split() + + assert crypto_util.clean(id) == id + assert len(id_words) == CryptoUtil.DEFAULT_WORDS_IN_RANDOM_ID + + for word in id_words: + assert word in app.crypto_util.get_wordlist(locale) - def test_display_id(self): - id = crypto_util.display_id() - id_words = id.split() - self.assertEqual(len(id_words), 2) - self.assertIn(id_words[0], crypto_util.adjectives) - self.assertIn(id_words[1], crypto_util.nouns) +def test_genrandomid_default_locale_is_en(source_app): + verify_genrandomid(source_app, 'en') + + +def test_get_wordlist(source_app, config): + locales = [] + wordlists_path = os.path.join(config.SECUREDROP_ROOT, 'wordlists') + for f in os.listdir(wordlists_path): + if f.endswith('.txt') and f != 'en.txt': + locales.append(f.split('.')[0]) + + with source_app.app_context(): + list_en = source_app.crypto_util.get_wordlist('en') + for locale in locales: + assert source_app.crypto_util.get_wordlist(locale) != list_en + verify_genrandomid(source_app, locale) + assert source_app.crypto_util.get_wordlist('unknown') == list_en + + +def test_hash_codename(source_app): + codename = source_app.crypto_util.genrandomid() + hashed_codename = source_app.crypto_util.hash_codename(codename) + + assert re.compile('^[2-7A-Z]{103}=$').match(hashed_codename) + + +def test_display_id(source_app): + id = source_app.crypto_util.display_id() + id_words = id.split() + + assert len(id_words) == 2 + assert id_words[0] in source_app.crypto_util.adjectives + assert id_words[1] in source_app.crypto_util.nouns + + +def test_genkeypair(source_app): + with source_app.app_context(): + codename = source_app.crypto_util.genrandomid() + filesystem_id = source_app.crypto_util.hash_codename(codename) + journalist_filename = source_app.crypto_util.display_id() + source = models.Source(filesystem_id, journalist_filename) + db.session.add(source) + db.session.commit() + source_app.crypto_util.genkeypair(source.filesystem_id, codename) - def test_hash_codename(self): - codename = crypto_util.genrandomid() - hashed_codename = crypto_util.hash_codename(codename) + assert source_app.crypto_util.getkey(filesystem_id) is not None - self.assertRegexpMatches(hashed_codename, '^[2-7A-Z]{103}=$') - def 
test_genkeypair(self): - codename = crypto_util.genrandomid() - filesystem_id = crypto_util.hash_codename(codename) - journalist_filename = crypto_util.display_id() - source = db.Source(filesystem_id, journalist_filename) - db.db_session.add(source) - db.db_session.commit() - crypto_util.genkeypair(source.filesystem_id, codename) +def parse_gpg_date_string(date_string): + """Parse a date string returned from `gpg --with-colons --list-keys` into a + datetime. - self.assertIsNotNone(crypto_util.getkey(filesystem_id)) + The format of the date strings is complicated; see gnupg doc/DETAILS for a + full explanation. - def test_delete_reply_keypair(self): - source, _ = utils.db_helper.init_source() - crypto_util.delete_reply_keypair(source.filesystem_id) + Key details: + - The creation date of the key is given in UTC. + - the date is usually printed in seconds since epoch, however, we are + migrating to an ISO 8601 format (e.g. "19660205T091500"). A simple + way to detect the new format is to scan for the 'T'. + """ + if 'T' in date_string: + dt = datetime.strptime(date_string, "%Y%m%dT%H%M%S") + else: + dt = datetime.utcfromtimestamp(int(date_string)) + return dt + + +def test_reply_keypair_creation_and_expiration_dates(source_app): + with source_app.app_context(): + codename = source_app.crypto_util.genrandomid() + filesystem_id = source_app.crypto_util.hash_codename(codename) + journalist_filename = source_app.crypto_util.display_id() + source = models.Source(filesystem_id, journalist_filename) + db.session.add(source) + db.session.commit() + source_app.crypto_util.genkeypair(source.filesystem_id, codename) + + # crypto_util.getkey only returns the fingerprint of the key. We need + # the full output of gpg.list_keys() to check the creation and + # expire dates. + # + # TODO: it might be generally useful to refactor crypto_util.getkey so + # it always returns the entire key dictionary instead of just the + # fingerprint (which is always easily extracted from the entire key + # dictionary). + new_key_fingerprint = source_app.crypto_util.getkey(filesystem_id) + new_key = [key for key in source_app.crypto_util.gpg.list_keys() + if new_key_fingerprint == key['fingerprint']][0] + + # All keys should share the same creation date to avoid leaking + # information about when sources first created accounts. 
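Both branches of parse_gpg_date_string are easy to exercise directly; the ISO 8601 value below is the example given in its docstring:

    from datetime import datetime

    # epoch-seconds form
    assert parse_gpg_date_string('0') == datetime(1970, 1, 1, 0, 0)
    # ISO 8601 form, detected by the presence of 'T'
    assert (parse_gpg_date_string('19660205T091500')
            == datetime(1966, 2, 5, 9, 15))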
+ creation_date = parse_gpg_date_string(new_key['date']) + assert (creation_date.date() == + CryptoUtil.DEFAULT_KEY_CREATION_DATE) + + # Reply keypairs should not expire + expire_date = new_key['expires'] + assert expire_date == '' + + +def test_delete_reply_keypair(source_app, test_source): + fid = test_source['filesystem_id'] + source_app.crypto_util.delete_reply_keypair(fid) + assert source_app.crypto_util.getkey(fid) is None + + +def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source, + mocker, capsys): + """ + Regression test for https://github.com/freedomofpress/securedrop/issues/4294 + """ + fid = test_source['filesystem_id'] + + # Patch private python-gnupg method to reproduce the issue in #4294 + mocker.patch('pretty_bad_protocol._util._separate_keyword', + return_value=('PINENTRY_LAUNCHED', 'does not matter')) + + source_app.crypto_util.delete_reply_keypair(fid) + + captured = capsys.readouterr() + assert "ValueError: Unknown status message: 'PINENTRY_LAUNCHED'" not in captured.err + assert source_app.crypto_util.getkey(fid) is None + + +def test_delete_reply_keypair_no_key(source_app): + """No exceptions should be raised when provided a filesystem id that + does not exist. + """ + source_app.crypto_util.delete_reply_keypair('Reality Winner') + + +def test_getkey(source_app, test_source): + assert (source_app.crypto_util.getkey(test_source['filesystem_id']) + is not None) + + # check that a non-existent key returns None + assert source_app.crypto_util.getkey('x' * 50) is None + + +def test_export_pubkey(source_app, test_source): + begin_pgp = '-----BEGIN PGP PUBLIC KEY BLOCK----' + + # check that a filesystem_id exports the pubkey + exported = source_app.crypto_util.export_pubkey( + test_source['filesystem_id']) + assert exported.startswith(begin_pgp) + + # check that a non-existent identifer exports None + exported = source_app.crypto_util.export_pubkey('x' * 50) + assert exported is None + + +@given( + name=text(alphabet=crypto_util.DICEWARE_SAFE_CHARS), + secret=text(alphabet=crypto_util.DICEWARE_SAFE_CHARS), + message=text() +) +def test_encrypt_then_decrypt_gives_same_result( + source_app, + test_source, + name, + secret, + message +): + """Test that encrypting, then decrypting a string gives the original string. + + This is the first test case using `hypothesis`: + https://hypothesis.readthedocs.io + """ + crypto = source_app.crypto_util + + key = crypto.genkeypair( + name, + secret + ) + ciphertext = crypto.encrypt(message, str(key)) + decrypted_text = crypto.decrypt(secret, ciphertext) + + assert decrypted_text == message + + +def test_fifo_cache(): + cache = FIFOCache(3) - self.assertIsNone(crypto_util.getkey(source.filesystem_id)) + cache.put('item 1', 1) + cache.put('item 2', 2) + cache.put('item 3', 3) - def test_delete_reply_keypair_no_key(self): - """No exceptions should be raised when provided a filesystem id that - does not exist. 
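The @given test above is the round-trip property pattern in its smallest form; stripped of the app fixtures it reduces to something like this, with base64 standing in for the encrypt/decrypt pair (assuming hypothesis is installed):

    import base64

    from hypothesis import given
    from hypothesis.strategies import text

    @given(message=text())
    def test_base64_roundtrip(message):
        data = message.encode('utf-8')
        assert base64.b64decode(base64.b64encode(data)) == data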
- """ - crypto_util.delete_reply_keypair('Reality Winner') + assert cache.get('item 1') == 1 + assert cache.get('item 2') == 2 + assert cache.get('item 3') == 3 - def test_getkey(self): - source, _ = utils.db_helper.init_source() + cache.put('item 4', 4) + # Maxsize is 3, so adding item 4 should kick out item 1 + assert not cache.get('item 1') + assert cache.get('item 2') == 2 + assert cache.get('item 3') == 3 + assert cache.get('item 4') == 4 - self.assertIsNotNone(crypto_util.getkey(source.filesystem_id)) + cache.delete('item 2') + assert not cache.get('item 2') diff --git a/securedrop/tests/test_db.py b/securedrop/tests/test_db.py --- a/securedrop/tests/test_db.py +++ b/securedrop/tests/test_db.py @@ -1,82 +1,106 @@ # -*- coding: utf-8 -*- -from flask_testing import TestCase -import mock +import pytest -import journalist -from utils import db_helper, env -from db import (Journalist, Submission, Reply, get_one_or_else, - LoginThrottledException) +from mock import MagicMock +from .utils import db_helper +from models import (Journalist, Submission, Reply, Source, get_one_or_else, + LoginThrottledException) -class TestDatabase(TestCase): - def create_app(self): - return journalist.app +def test_source_public_key_setter_unimplemented(journalist_app, test_source): + with journalist_app.app_context(): + source = Source.query.first() + with pytest.raises(NotImplementedError): + source.public_key = 'a curious developer tries to set a pubkey!' - def setUp(self): - env.setup() - def tearDown(self): - env.teardown() +def test_source_public_key_delete_unimplemented(journalist_app, test_source): + with journalist_app.app_context(): + source = Source.query.first() + with pytest.raises(NotImplementedError): + del source.public_key - @mock.patch('flask.abort') - def test_get_one_or_else_returns_one(self, mock): - new_journo, _ = db_helper.init_journalist() - query = Journalist.query.filter( - Journalist.username == new_journo.username) - with mock.patch('logger') as mock_logger: - selected_journo = get_one_or_else(query, mock_logger, mock) - self.assertEqual(new_journo, selected_journo) +def test_get_one_or_else_returns_one(journalist_app, test_journo): + with journalist_app.app_context(): + # precondition: there must be one journalist + assert Journalist.query.count() == 1 - @mock.patch('flask.abort') - def test_get_one_or_else_multiple_results(self, mock): - journo_1, _ = db_helper.init_journalist() - journo_2, _ = db_helper.init_journalist() + query = Journalist.query.filter_by(username=test_journo['username']) + selected_journo = get_one_or_else(query, MagicMock(), MagicMock()) - with mock.patch('logger') as mock_logger: - get_one_or_else(Journalist.query, mock_logger, mock) - mock_logger.error.assert_called() # Not specifying very long log line - mock.assert_called_with(500) + assert selected_journo.id == test_journo['id'] - @mock.patch('flask.abort') - def test_get_one_or_else_no_result_found(self, mock): - query = Journalist.query.filter(Journalist.username == "alice") - with mock.patch('logger') as mock_logger: - get_one_or_else(query, mock_logger, mock) +def test_get_one_or_else_multiple_results(journalist_app, + test_admin, + test_journo): + with journalist_app.app_context(): + # precondition: there must be multiple journalists + assert Journalist.query.count() == 2 + + mock_logger = MagicMock() + mock_abort = MagicMock() + + # this is equivalent to "SELECT *" which we know returns 2 + query = Journalist.query + get_one_or_else(query, mock_logger, mock_abort) + # Not specifying the very long 
log line in `logger.error` + mock_logger.error.assert_called() + mock_abort.assert_called_with(500) + + +def test_get_one_or_else_no_result_found(journalist_app, test_journo): + with journalist_app.app_context(): + # precondition: there must be one journalist + assert Journalist.query.count() == 1 + + bad_name = test_journo['username'] + 'aaaaaa' + query = Journalist.query.filter_by(username=bad_name) + + mock_logger = MagicMock() + mock_abort = MagicMock() + get_one_or_else(query, mock_logger, mock_abort) + log_line = ('Found none when one was expected: ' 'No row was found for one()') mock_logger.error.assert_called_with(log_line) - mock.assert_called_with(404) + mock_abort.assert_called_with(404) + - # Check __repr__ do not throw exceptions +def test_throttle_login(journalist_app, test_journo): + with journalist_app.app_context(): + journalist = test_journo['journalist'] + for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD): + Journalist.throttle_login(journalist) + with pytest.raises(LoginThrottledException): + Journalist.throttle_login(journalist) - def test_submission_string_representation(self): - source, _ = db_helper.init_source() - db_helper.submit(source, 2) +def test_submission_string_representation(journalist_app, test_source): + with journalist_app.app_context(): + db_helper.submit(test_source['source'], 2) test_submission = Submission.query.first() test_submission.__repr__() - def test_reply_string_representation(self): - journalist, _ = db_helper.init_journalist() - source, _ = db_helper.init_source() - db_helper.reply(journalist, source, 2) + +def test_reply_string_representation(journalist_app, + test_journo, + test_source): + with journalist_app.app_context(): + db_helper.reply(test_journo['journalist'], + test_source['source'], + 2) test_reply = Reply.query.first() test_reply.__repr__() - def test_journalist_string_representation(self): - test_journalist, _ = db_helper.init_journalist() - test_journalist.__repr__() - def test_source_string_representation(self): - test_source, _ = db_helper.init_source() - test_source.__repr__() +def test_journalist_string_representation(journalist_app, test_journo): + with journalist_app.app_context(): + test_journo['journalist'].__repr__() - def test_throttle_login(self): - journalist, _ = db_helper.init_journalist() - for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD): - Journalist.throttle_login(journalist) - with self.assertRaises(LoginThrottledException): - Journalist.throttle_login(journalist) + +def test_source_string_representation(journalist_app, test_source): + with journalist_app.app_context(): + test_source['source'].__repr__() diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py --- a/securedrop/tests/test_i18n.py +++ b/securedrop/tests/test_i18n.py @@ -16,273 +16,280 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
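For context, the helper exercised by the get_one_or_else tests above maps SQLAlchemy's one() failures onto HTTP-style aborts; judging from the asserted log line it is shaped roughly like this sketch (the 500-branch message is assumed):

    from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

    def get_one_or_else(query, logger, failure_method):
        try:
            return query.one()
        except MultipleResultsFound as e:
            logger.error('Found multiple when one was expected: %s' % (e,))
            failure_method(500)
        except NoResultFound as e:
            logger.error('Found none when one was expected: %s' % (e,))
            failure_method(404)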
# -import argparse -import logging import os import re -from flask import request, session, render_template_string, render_template -from flask_babel import gettext -from werkzeug.datastructures import Headers - -os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config +from db import db import i18n -import journalist_app -import manage +import i18n_tool +import journalist_app as journalist_app_module import pytest import source_app -import version -import utils - - -class TestI18N(object): - - @classmethod - def setup_class(cls): - utils.env.setup() - - def get_fake_config(self): - class Config: - def __getattr__(self, name): - return getattr(config, name) - return Config() - - def test_get_supported_locales(self): - locales = ['en_US', 'fr_FR'] - assert ['en_US'] == i18n._get_supported_locales( - locales, None, None, None) - locales = ['en_US', 'fr_FR'] - supported = ['en_US', 'not_found'] - with pytest.raises(i18n.LocaleNotFound) as excinfo: - i18n._get_supported_locales(locales, supported, None, None) - assert "contains ['not_found']" in str(excinfo.value) - supported = ['fr_FR'] - locale = 'not_found' - with pytest.raises(i18n.LocaleNotFound) as excinfo: - i18n._get_supported_locales(locales, supported, locale, None) - assert "DEFAULT_LOCALE 'not_found'" in str(excinfo.value) - - def verify_i18n(self, app): - not_translated = 'code hello i18n' - translated_fr = 'code bonjour' - - for accepted in ('unknown', 'en_US'): - headers = Headers([('Accept-Language', accepted)]) - with app.test_request_context(headers=headers): - assert not hasattr(request, 'babel_locale') - assert not_translated == gettext(not_translated) - assert hasattr(request, 'babel_locale') - assert render_template_string(''' - {{ gettext('code hello i18n') }} - ''').strip() == not_translated - - for lang in ('fr_FR', 'fr', 'fr-FR'): - headers = Headers([('Accept-Language', lang)]) - with app.test_request_context(headers=headers): - assert not hasattr(request, 'babel_locale') - assert translated_fr == gettext(not_translated) - assert hasattr(request, 'babel_locale') - assert render_template_string(''' - {{ gettext('code hello i18n') }} - ''').strip() == translated_fr - - # https://github.com/freedomofpress/securedrop/issues/2379 - headers = Headers([('Accept-Language', - 'en-US;q=0.6,fr_FR;q=0.4,nb_NO;q=0.2')]) - with app.test_request_context(headers=headers): - assert not hasattr(request, 'babel_locale') - assert not_translated == gettext(not_translated) - - translated_cn = 'code chinese' - - for lang in ('zh-CN', 'zh-Hans-CN'): - headers = Headers([('Accept-Language', lang)]) - with app.test_request_context(headers=headers): - assert not hasattr(request, 'babel_locale') - assert translated_cn == gettext(not_translated) - assert hasattr(request, 'babel_locale') - assert render_template_string(''' - {{ gettext('code hello i18n') }} - ''').strip() == translated_cn - - translated_ar = 'code arabic' - - for lang in ('ar', 'ar-kw'): - headers = Headers([('Accept-Language', lang)]) - with app.test_request_context(headers=headers): - assert not hasattr(request, 'babel_locale') - assert translated_ar == gettext(not_translated) - assert hasattr(request, 'babel_locale') - assert render_template_string(''' - {{ gettext('code hello i18n') }} - ''').strip() == translated_ar - - with app.test_client() as c: - page = c.get('/login') - assert session.get('locale') is None - assert not_translated == gettext(not_translated) - assert '?l=fr_FR' in page.data - assert '?l=en_US' not in page.data - - page = c.get('/login?l=fr_FR', - 
headers=Headers([('Accept-Language', 'en_US')])) - assert session.get('locale') == 'fr_FR' - assert translated_fr == gettext(not_translated) - assert '?l=fr_FR' not in page.data - assert '?l=en_US' in page.data +from flask import render_template +from flask import render_template_string +from flask import request +from flask import session +from flask_babel import gettext +from sdconfig import SDConfig +from sh import pybabel +from sh import sed +from .utils.env import TESTS_DIR +from werkzeug.datastructures import Headers - c.get('/', headers=Headers([('Accept-Language', 'en_US')])) - assert session.get('locale') == 'fr_FR' - assert translated_fr == gettext(not_translated) +os.environ['SECUREDROP_ENV'] = 'test' # noqa - c.get('/?l=') - assert session.get('locale') is None - assert not_translated == gettext(not_translated) - c.get('/?l=en_US', headers=Headers([('Accept-Language', 'fr_FR')])) - assert session.get('locale') == 'en_US' - assert not_translated == gettext(not_translated) +def verify_i18n(app): + not_translated = 'code hello i18n' + translated_fr = 'code bonjour' - c.get('/', headers=Headers([('Accept-Language', 'fr_FR')])) - assert session.get('locale') == 'en_US' + for accepted in ('unknown', 'en_US'): + headers = Headers([('Accept-Language', accepted)]) + with app.test_request_context(headers=headers): + assert not hasattr(request, 'babel_locale') assert not_translated == gettext(not_translated) + assert hasattr(request, 'babel_locale') + assert render_template_string(''' + {{ gettext('code hello i18n') }} + ''').strip() == not_translated - c.get('/?l=', headers=Headers([('Accept-Language', 'fr_FR')])) - assert session.get('locale') is None + for lang in ('fr_FR', 'fr', 'fr-FR'): + headers = Headers([('Accept-Language', lang)]) + with app.test_request_context(headers=headers): + assert not hasattr(request, 'babel_locale') assert translated_fr == gettext(not_translated) + assert hasattr(request, 'babel_locale') + assert render_template_string(''' + {{ gettext('code hello i18n') }} + ''').strip() == translated_fr + + # https://github.com/freedomofpress/securedrop/issues/2379 + headers = Headers([('Accept-Language', + 'en-US;q=0.6,fr_FR;q=0.4,nb_NO;q=0.2')]) + with app.test_request_context(headers=headers): + assert not hasattr(request, 'babel_locale') + assert not_translated == gettext(not_translated) + + translated_cn = 'code chinese' + + for lang in ('zh-CN', 'zh-Hans-CN'): + headers = Headers([('Accept-Language', lang)]) + with app.test_request_context(headers=headers): + assert not hasattr(request, 'babel_locale') + assert translated_cn == gettext(not_translated) + assert hasattr(request, 'babel_locale') + assert render_template_string(''' + {{ gettext('code hello i18n') }} + ''').strip() == translated_cn - c.get('/') - assert session.get('locale') is None - assert not_translated == gettext(not_translated) - - c.get('/?l=YY_ZZ') - assert session.get('locale') is None - assert not_translated == gettext(not_translated) + translated_ar = 'code arabic' - with app.test_request_context(): - assert '' == render_template('locales.html') - - with app.test_client() as c: - c.get('/') - locales = render_template('locales.html') - assert '?l=fr_FR' in locales - assert '?l=en_US' not in locales - c.get('/?l=ar') - base = render_template('base.html') - assert 'dir="rtl"' in base - - # the canonical locale name is norsk bokmål but - # this is overriden with just norsk by i18n.NAME_OVERRIDES - with app.test_client() as c: - c.get('/?l=nb_NO') - base = render_template('base.html') - assert 
'norsk' in base - assert 'norsk bo' not in base - - def test_i18n(self): - sources = [ - 'tests/i18n/code.py', - 'tests/i18n/template.html', - ] - kwargs = { - 'translations_dir': config.TEMP_DIR, - 'mapping': 'tests/i18n/babel.cfg', - 'source': sources, - 'extract_update': True, - 'compile': True, - 'verbose': logging.DEBUG, - 'version': version.__version__, - } - args = argparse.Namespace(**kwargs) - manage.setup_verbosity(args) - manage.translate_messages(args) - - manage.sh(""" - pybabel init -i {d}/messages.pot -d {d} -l en_US - - pybabel init -i {d}/messages.pot -d {d} -l fr_FR - sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code bonjour"/' \ - {d}/fr_FR/LC_MESSAGES/messages.po - - pybabel init -i {d}/messages.pot -d {d} -l zh_Hans_CN - sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code chinese"/' \ - {d}/zh_Hans_CN/LC_MESSAGES/messages.po - - pybabel init -i {d}/messages.pot -d {d} -l ar - sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code arabic"/' \ - {d}/ar/LC_MESSAGES/messages.po - - pybabel init -i {d}/messages.pot -d {d} -l nb_NO - sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code norwegian"/' \ - {d}/nb_NO/LC_MESSAGES/messages.po - - pybabel init -i {d}/messages.pot -d {d} -l es_ES - sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code spanish"/' \ - {d}/es_ES/LC_MESSAGES/messages.po - """.format(d=config.TEMP_DIR)) - - manage.translate_messages(args) - - fake_config = self.get_fake_config() - fake_config.SUPPORTED_LOCALES = [ - 'en_US', 'fr_FR', 'zh_Hans_CN', 'ar', 'nb_NO'] - fake_config.TRANSLATION_DIRS = config.TEMP_DIR - for app in (journalist_app.create_app(fake_config), - source_app.create_app(fake_config)): - assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES - self.verify_i18n(app) - - def test_verify_default_locale_en_us_if_not_defined_in_config(self): - class Config: - def __getattr__(self, name): - if name == 'DEFAULT_LOCALE': - raise AttributeError() - return getattr(config, name) - not_translated = 'code hello i18n' - with source_app.create_app(Config()).test_client() as c: - c.get('/') - assert not_translated == gettext(not_translated) - - def test_locale_to_rfc_5646(self): - assert i18n.locale_to_rfc_5646('en') == 'en' - assert i18n.locale_to_rfc_5646('en-US') == 'en' - assert i18n.locale_to_rfc_5646('en_US') == 'en' - assert i18n.locale_to_rfc_5646('en-us') == 'en' - assert i18n.locale_to_rfc_5646('zh-hant') == 'zh-Hant' - - def test_html_en_lang_correct(self): - fake_config = self.get_fake_config() - app = journalist_app.create_app(fake_config).test_client() - resp = app.get('/', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="en".*>').search(html), html - - app = source_app.create_app(fake_config).test_client() - resp = app.get('/', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="en".*>').search(html), html - - # check '/generate' too because '/' uses a different template - resp = app.get('/generate', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="en".*>').search(html), html - - def test_html_fr_lang_correct(self): - """Check that when the locale is fr_FR the lang property is correct""" - fake_config = self.get_fake_config() - fake_config.SUPPORTED_LOCALES = ['fr_FR', 'en_US'] - app = journalist_app.create_app(fake_config).test_client() - resp = app.get('/?l=fr_FR', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="fr".*>').search(html), html - - app = 
source_app.create_app(fake_config).test_client() - resp = app.get('/?l=fr_FR', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="fr".*>').search(html), html - - # check '/generate' too because '/' uses a different template - resp = app.get('/generate?l=fr_FR', follow_redirects=True) - html = resp.data.decode('utf-8') - assert re.compile('<html .*lang="fr".*>').search(html), html + for lang in ('ar', 'ar-kw'): + headers = Headers([('Accept-Language', lang)]) + with app.test_request_context(headers=headers): + assert not hasattr(request, 'babel_locale') + assert translated_ar == gettext(not_translated) + assert hasattr(request, 'babel_locale') + assert render_template_string(''' + {{ gettext('code hello i18n') }} + ''').strip() == translated_ar + + with app.test_client() as c: + page = c.get('/login') + assert session.get('locale') is None + assert not_translated == gettext(not_translated) + assert b'?l=fr_FR' in page.data + assert b'?l=en_US' not in page.data + + page = c.get('/login?l=fr_FR', + headers=Headers([('Accept-Language', 'en_US')])) + assert session.get('locale') == 'fr_FR' + assert translated_fr == gettext(not_translated) + assert b'?l=fr_FR' not in page.data + assert b'?l=en_US' in page.data + + c.get('/', headers=Headers([('Accept-Language', 'en_US')])) + assert session.get('locale') == 'fr_FR' + assert translated_fr == gettext(not_translated) + + c.get('/?l=') + assert session.get('locale') is None + assert not_translated == gettext(not_translated) + + c.get('/?l=en_US', headers=Headers([('Accept-Language', 'fr_FR')])) + assert session.get('locale') == 'en_US' + assert not_translated == gettext(not_translated) + + c.get('/', headers=Headers([('Accept-Language', 'fr_FR')])) + assert session.get('locale') == 'en_US' + assert not_translated == gettext(not_translated) + + c.get('/?l=', headers=Headers([('Accept-Language', 'fr_FR')])) + assert session.get('locale') is None + assert translated_fr == gettext(not_translated) + + c.get('/') + assert session.get('locale') is None + assert not_translated == gettext(not_translated) + + c.get('/?l=YY_ZZ') + assert session.get('locale') is None + assert not_translated == gettext(not_translated) + + with app.test_request_context(): + assert '' == render_template('locales.html') + + with app.test_client() as c: + c.get('/') + locales = render_template('locales.html') + assert '?l=fr_FR' in locales + assert '?l=en_US' not in locales + c.get('/?l=ar') + base = render_template('base.html') + assert 'dir="rtl"' in base + + # the canonical locale name is norsk bokmål but + # this is overridden with just norsk by i18n.NAME_OVERRIDES + with app.test_client() as c: + c.get('/?l=nb_NO') + base = render_template('base.html') + assert 'norsk' in base + assert 'norsk bo' not in base + + +def test_get_supported_locales(): + locales = ['en_US', 'fr_FR'] + assert ['en_US'] == i18n._get_supported_locales( + locales, None, None, None) + locales = ['en_US', 'fr_FR'] + supported = ['en_US', 'not_found'] + with pytest.raises(i18n.LocaleNotFound) as excinfo: + i18n._get_supported_locales(locales, supported, None, None) + assert "contains ['not_found']" in str(excinfo.value) + supported = ['fr_FR'] + locale = 'not_found' + with pytest.raises(i18n.LocaleNotFound) as excinfo: + i18n._get_supported_locales(locales, supported, locale, None) + assert "DEFAULT_LOCALE 'not_found'" in str(excinfo.value) + + +# Grab the journalist_app fixture to trigger creation of resources +def test_i18n(journalist_app, config): + # Then delete
it because using it won't test what we want + del journalist_app + + sources = [ + os.path.join(TESTS_DIR, 'i18n/code.py'), + os.path.join(TESTS_DIR, 'i18n/template.html'), + ] + + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-messages', + '--mapping', os.path.join(TESTS_DIR, 'i18n/babel.cfg'), + '--translations-dir', config.TEMP_DIR, + '--sources', ",".join(sources), + '--extract-update', + ]) + + pot = os.path.join(config.TEMP_DIR, 'messages.pot') + pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', 'en_US') + + for (l, s) in (('fr_FR', 'code bonjour'), + ('zh_Hans_CN', 'code chinese'), + ('ar', 'code arabic'), + ('nb_NO', 'code norwegian'), + ('es_ES', 'code spanish')): + pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', l) + po = os.path.join(config.TEMP_DIR, l, 'LC_MESSAGES/messages.po') + sed('-i', '-e', + '/code hello i18n/,+1s/msgstr ""/msgstr "{}"/'.format(s), + po) + + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-messages', + '--translations-dir', config.TEMP_DIR, + '--compile', + ]) + + fake_config = SDConfig() + fake_config.SUPPORTED_LOCALES = [ + 'en_US', 'fr_FR', 'zh_Hans_CN', 'ar', 'nb_NO'] + fake_config.TRANSLATION_DIRS = config.TEMP_DIR + + # Use our config (and not an app fixture) because the i18n module + # grabs values at init time and we can't inject them later. + for app in (journalist_app_module.create_app(fake_config), + source_app.create_app(fake_config)): + with app.app_context(): + db.create_all() + assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES + verify_i18n(app) + + +def test_verify_default_locale_en_us_if_not_defined_in_config(config): + class Config: + def __getattr__(self, name): + if name == 'DEFAULT_LOCALE': + raise AttributeError() + return getattr(config, name) + not_translated = 'code hello i18n' + with source_app.create_app(Config()).test_client() as c: + with c.application.app_context(): + db.create_all() + c.get('/') + assert not_translated == gettext(not_translated) + + +def test_locale_to_rfc_5646(): + assert i18n.locale_to_rfc_5646('en') == 'en' + assert i18n.locale_to_rfc_5646('en-US') == 'en' + assert i18n.locale_to_rfc_5646('en_US') == 'en' + assert i18n.locale_to_rfc_5646('en-us') == 'en' + assert i18n.locale_to_rfc_5646('zh-hant') == 'zh-Hant' + + +# Grab the journalist_app fixture to trigger creation of resources +def test_html_en_lang_correct(journalist_app, config): + # Then delete it because using it won't test what we want + del journalist_app + + app = journalist_app_module.create_app(config).test_client() + resp = app.get('/', follow_redirects=True) + html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="en".*>').search(html), html + + app = source_app.create_app(config).test_client() + resp = app.get('/', follow_redirects=True) + html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="en".*>').search(html), html + + # check '/generate' too because '/' uses a different template + resp = app.get('/generate', follow_redirects=True) + html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="en".*>').search(html), html + + +# Grab the journalist_app fixture to trigger creation of resources +def test_html_fr_lang_correct(journalist_app, config): + """Check that when the locale is fr_FR the lang property is correct""" + + # Then delete it because using it won't test what we want + del journalist_app + + config.SUPPORTED_LOCALES = ['fr_FR', 'en_US'] + app = journalist_app_module.create_app(config).test_client() + resp = app.get('/?l=fr_FR', follow_redirects=True) + 
html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="fr".*>').search(html), html + + app = source_app.create_app(config).test_client() + resp = app.get('/?l=fr_FR', follow_redirects=True) + html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="fr".*>').search(html), html + + # check '/generate' too because '/' uses a different template + resp = app.get('/generate?l=fr_FR', follow_redirects=True) + html = resp.data.decode('utf-8') + assert re.compile('<html .*lang="fr".*>').search(html), html diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_i18n_tool.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- + +import io +import os +from os.path import abspath, dirname, exists, getmtime, join, realpath +os.environ['SECUREDROP_ENV'] = 'test' # noqa +import i18n_tool +from mock import patch +import pytest +import shutil +import signal +import time + +from sh import sed, msginit, pybabel, git, touch + + +class TestI18NTool(object): + + def setup(self): + self.dir = abspath(dirname(realpath(__file__))) + + def test_main(self, tmpdir, caplog): + with pytest.raises(SystemExit): + i18n_tool.I18NTool().main(['--help']) + + tool = i18n_tool.I18NTool() + with patch.object(tool, + 'setup_verbosity', + side_effect=KeyboardInterrupt): + assert tool.main([ + 'translate-messages', + '--translations-dir', str(tmpdir) + ]) == signal.SIGINT + + def test_translate_desktop_l10n(self, tmpdir): + in_files = {} + for what in ('source', 'journalist'): + in_files[what] = join(str(tmpdir), what + '.desktop.in') + shutil.copy(join(self.dir, 'i18n/' + what + '.desktop.in'), + in_files[what]) + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-desktop', + '--translations-dir', str(tmpdir), + '--sources', in_files['source'], + '--extract-update', + ]) + messages_file = join(str(tmpdir), 'desktop.pot') + assert exists(messages_file) + with io.open(messages_file) as fobj: + pot = fobj.read() + assert 'SecureDrop Source Interfaces' in pot + # pretend this happened a few seconds ago + few_seconds_ago = time.time() - 60 + os.utime(messages_file, (few_seconds_ago, few_seconds_ago)) + + i18n_file = join(str(tmpdir), 'source.desktop') + + # + # Extract+update but do not compile + # + old_messages_mtime = getmtime(messages_file) + assert not exists(i18n_file) + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-desktop', + '--translations-dir', str(tmpdir), + '--sources', ",".join(list(in_files.values())), + '--extract-update', + ]) + assert not exists(i18n_file) + current_messages_mtime = getmtime(messages_file) + assert old_messages_mtime < current_messages_mtime + + locale = 'fr_FR' + po_file = join(str(tmpdir), locale + ".po") + msginit( + '--no-translator', + '--locale', locale, + '--output', po_file, + '--input', messages_file) + source = 'SecureDrop Source Interfaces' + sed('-i', '-e', + '/{}/,+1s/msgstr ""/msgstr "SOURCE FR"/'.format(source), + po_file) + assert exists(po_file) + + # Regression test to trigger a bug introduced when adding + # Romanian as an accepted language.
+ locale = 'ro' + po_file = join(str(tmpdir), locale + ".po") + msginit( + '--no-translator', + '--locale', locale, + '--output', po_file, + '--input', messages_file) + source = 'SecureDrop Source Interfaces' + sed('-i', '-e', + '/{}/,+1s/msgstr ""/msgstr "SOURCE RO"/'.format(source), + po_file) + assert exists(po_file) + + # + # Compile but do not extract+update + # + old_messages_mtime = current_messages_mtime + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-desktop', + '--translations-dir', str(tmpdir), + '--sources', ",".join(list(in_files.values()) + ['BOOM']), + '--compile', + ]) + assert old_messages_mtime == getmtime(messages_file) + with io.open(po_file) as fobj: + po = fobj.read() + assert 'SecureDrop Source Interfaces' in po + assert 'SecureDrop Journalist Interfaces' not in po + with io.open(i18n_file) as fobj: + i18n = fobj.read() + assert 'SOURCE FR' in i18n + + def test_translate_messages_l10n(self, tmpdir): + source = [ + join(self.dir, 'i18n/code.py'), + join(self.dir, 'i18n/template.html'), + ] + args = [ + '--verbose', + 'translate-messages', + '--translations-dir', str(tmpdir), + '--mapping', join(self.dir, 'i18n/babel.cfg'), + '--sources', ",".join(source), + '--extract-update', + '--compile', + ] + i18n_tool.I18NTool().main(args) + messages_file = join(str(tmpdir), 'messages.pot') + assert exists(messages_file) + with io.open(messages_file, 'rb') as fobj: + pot = fobj.read() + assert b'code hello i18n' in pot + assert b'template hello i18n' in pot + + locale = 'en_US' + locale_dir = join(str(tmpdir), locale) + pybabel('init', '-i', messages_file, '-d', str(tmpdir), '-l', locale) + mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo') + assert not exists(mo_file) + i18n_tool.I18NTool().main(args) + assert exists(mo_file) + with io.open(mo_file, mode='rb') as fobj: + mo = fobj.read() + assert b'code hello i18n' in mo + assert b'template hello i18n' in mo + + def test_translate_messages_compile_arg(self, tmpdir): + args = [ + '--verbose', + 'translate-messages', + '--translations-dir', str(tmpdir), + '--mapping', join(self.dir, 'i18n/babel.cfg'), + ] + i18n_tool.I18NTool().main(args + [ + '--sources', join(self.dir, 'i18n/code.py'), + '--extract-update', + ]) + messages_file = join(str(tmpdir), 'messages.pot') + assert exists(messages_file) + with io.open(messages_file) as fobj: + pot = fobj.read() + assert 'code hello i18n' in pot + + locale = 'en_US' + locale_dir = join(str(tmpdir), locale) + po_file = join(locale_dir, 'LC_MESSAGES/messages.po') + pybabel(['init', '-i', messages_file, '-d', str(tmpdir), '-l', locale]) + assert exists(po_file) + # pretend this happened a few seconds ago + few_seconds_ago = time.time() - 60 + os.utime(po_file, (few_seconds_ago, few_seconds_ago)) + + mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo') + + # + # Extract+update but do not compile + # + old_po_mtime = getmtime(po_file) + assert not exists(mo_file) + i18n_tool.I18NTool().main(args + [ + '--sources', join(self.dir, 'i18n/code.py'), + '--extract-update', + ]) + assert not exists(mo_file) + current_po_mtime = getmtime(po_file) + assert old_po_mtime < current_po_mtime + + # + # Compile but do not extract+update + # + source = [ + join(self.dir, 'i18n/code.py'), + join(self.dir, 'i18n/template.html'), + ] + old_po_mtime = current_po_mtime + i18n_tool.I18NTool().main(args + [ + '--sources', ",".join(source), + '--compile', + ]) + assert old_po_mtime == getmtime(po_file) + with io.open(mo_file, mode='rb') as fobj: + mo = fobj.read() + assert b'code hello i18n' in mo + 
assert b'template hello i18n' not in mo + + def test_require_git_email_name(self, tmpdir): + k = {'_cwd': str(tmpdir)} + git('init', **k) + with pytest.raises(Exception) as excinfo: + i18n_tool.I18NTool.require_git_email_name(str(tmpdir)) + assert 'please set name' in str(excinfo.value) + + git.config('user.email', "[email protected]", **k) + git.config('user.name', "Your Name", **k) + assert i18n_tool.I18NTool.require_git_email_name(str(tmpdir)) + + def test_update_docs(self, tmpdir, caplog): + k = {'_cwd': str(tmpdir)} + git.init(**k) + git.config('user.email', "[email protected]", **k) + git.config('user.name', "Your Name", **k) + os.mkdir(join(str(tmpdir), 'includes')) + touch('includes/l10n.txt', **k) + git.add('includes/l10n.txt', **k) + git.commit('-m', 'init', **k) + + i18n_tool.I18NTool().main([ + '--verbose', + 'update-docs', + '--documentation-dir', str(tmpdir)]) + assert 'l10n.txt updated' in caplog.text + caplog.clear() + i18n_tool.I18NTool().main([ + '--verbose', + 'update-docs', + '--documentation-dir', str(tmpdir)]) + assert 'l10n.txt already up to date' in caplog.text + + def test_update_from_weblate(self, tmpdir, caplog): + d = str(tmpdir) + for repo in ('i18n', 'securedrop'): + os.mkdir(join(d, repo)) + k = {'_cwd': join(d, repo)} + git.init(**k) + git.config('user.email', '[email protected]', **k) + git.config('user.name', 'Loïc Nordhøy', **k) + touch('README.md', **k) + git.add('README.md', **k) + git.commit('-m', 'README', 'README.md', **k) + for o in os.listdir(join(self.dir, 'i18n')): + f = join(self.dir, 'i18n', o) + if os.path.isfile(f): + shutil.copyfile(f, join(d, 'i18n', o)) + else: + shutil.copytree(f, join(d, 'i18n', o)) + k = {'_cwd': join(d, 'i18n')} + git.add('securedrop', 'install_files', **k) + git.commit('-m', 'init', '-a', **k) + git.checkout('-b', 'i18n', 'master', **k) + + def r(): + return "".join([str(l) for l in caplog.records]) + + # + # de_DE is not among the supported languages, so it is not taken + # into account despite the fact that it exists in weblate.
+ # + caplog.clear() + i18n_tool.I18NTool().main([ + '--verbose', + 'update-from-weblate', + '--root', join(str(tmpdir), 'securedrop'), + '--url', join(str(tmpdir), 'i18n'), + '--supported-languages', 'nl', + ]) + assert 'l10n: updated Dutch (nl)' in r() + assert 'l10n: updated German (de_DE)' not in r() + + # + # de_DE is added but there is no change in the nl translation + # therefore nothing is done for nl + # + caplog.clear() + i18n_tool.I18NTool().main([ + '--verbose', + 'update-from-weblate', + '--root', join(str(tmpdir), 'securedrop'), + '--url', join(str(tmpdir), 'i18n'), + '--supported-languages', 'nl,de_DE', + ]) + assert 'l10n: updated Dutch (nl)' not in r() + assert 'l10n: updated German (de_DE)' in r() + + # + # nothing new for nl or de_DE: nothing is done + # + caplog.clear() + i18n_tool.I18NTool().main([ + '--verbose', + 'update-from-weblate', + '--root', join(str(tmpdir), 'securedrop'), + '--url', join(str(tmpdir), 'i18n'), + '--supported-languages', 'nl,de_DE', + ]) + assert 'l10n: updated Dutch (nl)' not in r() + assert 'l10n: updated German (de_DE)' not in r() + message = str(git('--no-pager', '-C', 'securedrop', 'show', + _cwd=d, _encoding='utf-8')) + assert "Loïc" in message + + # + # an update is done to nl in weblate + # + k = {'_cwd': join(d, 'i18n')} + f = 'securedrop/translations/nl/LC_MESSAGES/messages.po' + sed('-i', '-e', 's/inactiviteit/INACTIVITEIT/', f, **k) + git.add(f, **k) + git.config('user.email', '[email protected]', **k) + git.config('user.name', 'Someone Else', **k) + git.commit('-m', 'translation change', f, **k) + + k = {'_cwd': join(d, 'securedrop')} + git.config('user.email', '[email protected]', **k) + git.config('user.name', 'Someone Else', **k) + + # + # the nl translation update from weblate is copied + # over. + # + caplog.clear() + i18n_tool.I18NTool().main([ + '--verbose', + 'update-from-weblate', + '--root', join(str(tmpdir), 'securedrop'), + '--url', join(str(tmpdir), 'i18n'), + '--supported-languages', 'nl,de_DE', + ]) + assert 'l10n: updated Dutch (nl)' in r() + assert 'l10n: updated German (de_DE)' not in r() + message = str(git('--no-pager', '-C', 'securedrop', 'show', + _cwd=d)) + assert "Someone Else" in message + assert "Loïc" not in message diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -1,636 +1,748 @@ # -*- coding: utf-8 -*- -from cStringIO import StringIO + import gzip -import mock import os +import random import re -import shutil -import tempfile -import unittest import zipfile +from base64 import b32encode +from binascii import unhexlify +from distutils.version import StrictVersion +from io import BytesIO +import mock +import pytest from bs4 import BeautifulSoup -from flask import session, g, escape -from mock import patch -import gnupg +from flask import current_app, escape, g, session +from pyotp import HOTP, TOTP + +import journalist_app as journalist_app_module +from . 
import utils +from .utils.instrument import InstrumentedApp os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config -import crypto_util -from db import db_session, Journalist -import journalist -import source -import store -import utils - - -class TestIntegration(unittest.TestCase): - - def _login_user(self): - self.journalist_app.post('/login', data=dict( - username=self.user.username, - password=self.user_pw, - token='mocked'), - follow_redirects=True) - - def setUp(self): - utils.env.setup() - - self.source_app = source.app.test_client() - self.journalist_app = journalist.app.test_client() - - self.gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR) - - # Patch the two-factor verification to avoid intermittent errors - patcher = mock.patch('db.Journalist.verify_token') - self.addCleanup(patcher.stop) - self.mock_journalist_verify_token = patcher.start() - self.mock_journalist_verify_token.return_value = True - - # Add a test user to the journalist interface and log them in - # print Journalist.query.all() - self.user_pw = "corret horse battery staple haha cultural reference" - self.user = Journalist(username="some-username", - password=self.user_pw) - db_session.add(self.user) - db_session.commit() - self._login_user() - - def tearDown(self): - utils.env.teardown() - - def test_submit_message(self): - """When a source creates an account, test that a new entry appears - in the journalist interface""" - test_msg = "This is a test message." - - with self.source_app as source_app: - resp = source_app.get('/generate') - resp = source_app.post('/create', follow_redirects=True) - filesystem_id = g.filesystem_id - # redirected to submission form - resp = self.source_app.post('/submit', data=dict( - msg=test_msg, - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - source_app.get('/logout') - # Request the Journalist Interface index - rv = self.journalist_app.get('/') - self.assertEqual(rv.status_code, 200) - self.assertIn("Sources", rv.data) - soup = BeautifulSoup(rv.data, 'html.parser') - # The source should have a "download unread" link that says "1 unread" +# Seed the RNG for deterministic testing +random.seed('ಠ_ಠ') + + [email protected](autouse=True, scope="module") +def patch_get_entropy_estimate(): + mock_get_entropy_estimate = mock.patch( + "source_app.main.get_entropy_estimate", + return_value=8192 + ).start() + + yield + + mock_get_entropy_estimate.stop() + + +def _login_user(app, user_dict): + resp = app.post('/login', + data={'username': user_dict['username'], + 'password': user_dict['password'], + 'token': TOTP(user_dict['otp_secret']).now()}, + follow_redirects=True) + assert resp.status_code == 200 + assert hasattr(g, 'user') # ensure logged in + + +def test_submit_message(source_app, journalist_app, test_journo): + """When a source creates an account, test that a new entry appears + in the journalist interface""" + test_msg = "This is a test message." 
+ + with source_app.test_client() as app: + app.get('/generate') + app.post('/create', follow_redirects=True) + filesystem_id = g.filesystem_id + # redirected to submission form + resp = app.post('/submit', data=dict( + msg=test_msg, + fh=(BytesIO(b''), ''), + ), follow_redirects=True) + assert resp.status_code == 200 + app.get('/logout') + + # Request the Journalist Interface index + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Sources" in text + soup = BeautifulSoup(text, 'html.parser') + + # The source should have a "download unread" link that + # says "1 unread" col = soup.select('ul#cols > li')[0] unread_span = col.select('span.unread a')[0] - self.assertIn("1 unread", unread_span.get_text()) + assert "1 unread" in unread_span.get_text() col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') submission_url = soup.select('ul#submissions li a')[0]['href'] - self.assertIn("-msg", submission_url) + assert "-msg" in submission_url span = soup.select('ul#submissions li span.info span')[0] - self.assertRegexpMatches(span['title'], "\d+ bytes") + assert re.compile(r'\d+ bytes').match(span['title']) - resp = self.journalist_app.get(submission_url) - self.assertEqual(resp.status_code, 200) - decrypted_data = self.gpg.decrypt(resp.data) - self.assertTrue(decrypted_data.ok) - self.assertEqual(decrypted_data.data, test_msg) + resp = app.get(submission_url) + assert resp.status_code == 200 + decrypted_data = journalist_app.crypto_util.gpg.decrypt(resp.data) + assert decrypted_data.ok + assert decrypted_data.data.decode('utf-8') == test_msg # delete submission - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') doc_name = soup.select( 'ul > li > input[name="doc_names_selected"]')[0]['value'] - resp = self.journalist_app.post('/bulk', data=dict( + resp = app.post('/bulk', data=dict( action='confirm_delete', filesystem_id=filesystem_id, doc_names_selected=doc_name )) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') - self.assertIn("The following file has been selected for", resp.data) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') + assert "The following file has been selected for" in text # confirm delete submission doc_name = soup.select doc_name = soup.select( 'ul > li > input[name="doc_names_selected"]')[0]['value'] - resp = self.journalist_app.post('/bulk', data=dict( + resp = app.post('/bulk', data=dict( action='delete', filesystem_id=filesystem_id, doc_names_selected=doc_name, ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') - self.assertIn("Submission deleted.", resp.data) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') + assert "Submission deleted." 
in text # confirm that submission deleted and absent in list of submissions - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - self.assertIn("No documents to display.", resp.data) + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "No documents to display." in text # the file should be deleted from the filesystem - # since file deletion is handled by a polling worker, this test needs - # to wait for the worker to get the job and execute it - utils.async.wait_for_assertion( - lambda: self.assertFalse( - os.path.exists(store.path(filesystem_id, doc_name)) - ) - ) - - def test_submit_file(self): - """When a source creates an account, test that a new entry appears - in the journalist interface""" - test_file_contents = "This is a test file." - test_filename = "test.txt" - - with self.source_app as source_app: - resp = source_app.get('/generate') - resp = source_app.post('/create', follow_redirects=True) - filesystem_id = g.filesystem_id - # redirected to submission form - resp = self.source_app.post('/submit', data=dict( - msg="", - fh=(StringIO(test_file_contents), test_filename), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - source_app.get('/logout') - - resp = self.journalist_app.get('/') - self.assertEqual(resp.status_code, 200) - self.assertIn("Sources", resp.data) - soup = BeautifulSoup(resp.data, 'html.parser') - - # The source should have a "download unread" link that says "1 unread" + # since file deletion is handled by a polling worker, this test + # needs to wait for the worker to get the job and execute it + def assertion(): + assert not ( + os.path.exists(current_app.storage.path(filesystem_id, + doc_name))) + utils.asynchronous.wait_for_assertion(assertion) + + +def test_submit_file(source_app, journalist_app, test_journo): + """When a source creates an account, test that a new entry appears + in the journalist interface""" + test_file_contents = b"This is a test file." 
+ test_filename = "test.txt" + + with source_app.test_client() as app: + app.get('/generate') + app.post('/create', follow_redirects=True) + filesystem_id = g.filesystem_id + # redirected to submission form + resp = app.post('/submit', data=dict( + msg="", + fh=(BytesIO(test_file_contents), test_filename), + ), follow_redirects=True) + assert resp.status_code == 200 + app.get('/logout') + + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Sources" in text + soup = BeautifulSoup(text, 'html.parser') + + # The source should have a "download unread" link that says + # "1 unread" col = soup.select('ul#cols > li')[0] unread_span = col.select('span.unread a')[0] - self.assertIn("1 unread", unread_span.get_text()) + assert "1 unread" in unread_span.get_text() col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') submission_url = soup.select('ul#submissions li a')[0]['href'] - self.assertIn("-doc", submission_url) + assert "-doc" in submission_url span = soup.select('ul#submissions li span.info span')[0] - self.assertRegexpMatches(span['title'], "\d+ bytes") + assert re.compile(r'\d+ bytes').match(span['title']) - resp = self.journalist_app.get(submission_url) - self.assertEqual(resp.status_code, 200) - decrypted_data = self.gpg.decrypt(resp.data) - self.assertTrue(decrypted_data.ok) + resp = app.get(submission_url) + assert resp.status_code == 200 + decrypted_data = journalist_app.crypto_util.gpg.decrypt(resp.data) + assert decrypted_data.ok - sio = StringIO(decrypted_data.data) + sio = BytesIO(decrypted_data.data) with gzip.GzipFile(mode='rb', fileobj=sio) as gzip_file: unzipped_decrypted_data = gzip_file.read() - self.assertEqual(unzipped_decrypted_data, test_file_contents) + mtime = gzip_file.mtime + assert unzipped_decrypted_data == test_file_contents + # Verify gzip file metadata and ensure timestamp is not present. 
+ assert mtime == 0 # delete submission - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + soup = BeautifulSoup(text, 'html.parser') doc_name = soup.select( 'ul > li > input[name="doc_names_selected"]')[0]['value'] - resp = self.journalist_app.post('/bulk', data=dict( + resp = app.post('/bulk', data=dict( action='confirm_delete', filesystem_id=filesystem_id, doc_names_selected=doc_name )) - self.assertEqual(resp.status_code, 200) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "The following file has been selected for" in text soup = BeautifulSoup(resp.data, 'html.parser') - self.assertIn("The following file has been selected for", resp.data) # confirm delete submission doc_name = soup.select doc_name = soup.select( 'ul > li > input[name="doc_names_selected"]')[0]['value'] - resp = self.journalist_app.post('/bulk', data=dict( + resp = app.post('/bulk', data=dict( action='delete', filesystem_id=filesystem_id, doc_names_selected=doc_name, ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Submission deleted." in text soup = BeautifulSoup(resp.data, 'html.parser') - self.assertIn("Submission deleted.", resp.data) # confirm that submission deleted and absent in list of submissions - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - self.assertIn("No documents to display.", resp.data) + resp = app.get(col_url) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "No documents to display." in text # the file should be deleted from the filesystem - # since file deletion is handled by a polling worker, this test needs - # to wait for the worker to get the job and execute it - utils.async.wait_for_assertion( - lambda: self.assertFalse( - os.path.exists(store.path(filesystem_id, doc_name)) - ) - ) - - def test_reply_normal(self): - self.helper_test_reply("This is a test reply.", True) - - def test_unicode_reply_with_ansi_env(self): - # This makes python-gnupg handle encoding equivalent to if we were - # running SD in an environment where os.getenv("LANG") == "C". - # Unfortunately, with the way our test suite is set up simply setting - # that env var here will not have the desired effect. Instead we - # monkey-patch the GPG object that is called crypto_util to imitate the - # _encoding attribute it would have had it been initialized in a "C" - # environment. See - # https://github.com/freedomofpress/securedrop/issues/1360 for context. - old_encoding = crypto_util.gpg._encoding - crypto_util.gpg._encoding = "ansi_x3.4_1968" - try: - self.helper_test_reply("ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ", True) - finally: - crypto_util.gpg._encoding = old_encoding - - def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None): - """ - Test that the given GPG message can be decrypted with the given key - (identified by its fingerprint). - """ - # GPG does not provide a way to specify which key to use to decrypt a - # message. Since the default keyring that we use has both the - # `config.JOURNALIST_KEY` and all of the reply keypairs, there's no way - # to use it to test whether a message is decryptable with a specific - # key. 
- gpg_tmp_dir = tempfile.mkdtemp() - gpg = gnupg.GPG(homedir=gpg_tmp_dir) - - # Export the key of interest from the application's keyring - pubkey = self.gpg.export_keys(key_fpr) - seckey = self.gpg.export_keys(key_fpr, secret=True) - # Import it into our isolated temporary GPG directory - for key in (pubkey, seckey): - gpg.import_keys(key) - - # Attempt decryption with the given key - if passphrase: - passphrase = crypto_util.hash_codename( - passphrase, - salt=crypto_util.SCRYPT_GPG_PEPPER) - decrypted_data = gpg.decrypt(msg, passphrase=passphrase) - self.assertTrue( - decrypted_data.ok, - "Could not decrypt msg with key, gpg says: {}".format( - decrypted_data.stderr)) - - # We have to clean up the temporary GPG dir - shutil.rmtree(gpg_tmp_dir) - - def helper_test_reply(self, test_reply, expected_success=True): - test_msg = "This is a test message." - - with self.source_app as source_app: - resp = source_app.get('/generate') - resp = source_app.post('/create', follow_redirects=True) - codename = session['codename'] - filesystem_id = g.filesystem_id - # redirected to submission form - resp = source_app.post('/submit', data=dict( - msg=test_msg, - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertFalse(g.source.flagged) - source_app.get('/logout') - - resp = self.journalist_app.get('/') - self.assertEqual(resp.status_code, 200) - self.assertIn("Sources", resp.data) + # since file deletion is handled by a polling worker, this test + # needs to wait for the worker to get the job and execute it + def assertion(): + assert not ( + os.path.exists(current_app.storage.path(filesystem_id, + doc_name))) + utils.asynchronous.wait_for_assertion(assertion) + + +def _helper_test_reply(journalist_app, source_app, config, test_journo, + test_reply, expected_success=True): + test_msg = "This is a test message." 
+ + with source_app.test_client() as app: + app.get('/generate') + app.post('/create', follow_redirects=True) + codename = session['codename'] + filesystem_id = g.filesystem_id + # redirected to submission form + resp = app.post('/submit', data=dict( + msg=test_msg, + fh=(BytesIO(b''), ''), + ), follow_redirects=True) + assert resp.status_code == 200 + assert not g.source.flagged + app.get('/logout') + + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Sources" in text soup = BeautifulSoup(resp.data, 'html.parser') col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(col_url) - self.assertEqual(resp.status_code, 200) - - with self.source_app as source_app: - resp = source_app.post('/login', data=dict( - codename=codename), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertFalse(g.source.flagged) - source_app.get('/logout') - - with self.journalist_app as journalist_app: - resp = journalist_app.post('/flag', data=dict( - filesystem_id=filesystem_id)) - self.assertEqual(resp.status_code, 200) - - with self.source_app as source_app: - resp = source_app.post('/login', data=dict( - codename=codename), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertTrue(g.source.flagged) - source_app.get('/lookup') - self.assertTrue(g.source.flagged) - source_app.get('/logout') - - # Block up to 15s for the reply keypair, so we can test sending a reply - utils.async.wait_for_assertion( - lambda: self.assertNotEqual(crypto_util.getkey(filesystem_id), - None), - 15) - - # Create 2 replies to test deleting on journalist and source interface + resp = app.get(col_url) + assert resp.status_code == 200 + + with source_app.test_client() as app: + resp = app.post('/login', data=dict( + codename=codename), follow_redirects=True) + assert resp.status_code == 200 + assert not g.source.flagged + app.get('/logout') + + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.post('/flag', data=dict( + filesystem_id=filesystem_id)) + assert resp.status_code == 200 + + with source_app.test_client() as app: + resp = app.post('/login', data=dict( + codename=codename), follow_redirects=True) + assert resp.status_code == 200 + app.get('/lookup') + assert g.source.flagged + app.get('/logout') + + # Block up to 15s for the reply keypair, so we can test sending a reply + def assertion(): + assert current_app.crypto_util.getkey(filesystem_id) is not None + utils.asynchronous.wait_for_assertion(assertion, 15) + + # Create 2 replies to test deleting on journalist and source interface + with journalist_app.test_client() as app: + _login_user(app, test_journo) for i in range(2): - resp = self.journalist_app.post('/reply', data=dict( + resp = app.post('/reply', data=dict( filesystem_id=filesystem_id, message=test_reply ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) + assert resp.status_code == 200 if not expected_success: pass else: - self.assertIn("Thanks. Your reply has been stored.", resp.data) + text = resp.data.decode('utf-8') + assert "Thanks. Your reply has been stored." 
in text + + resp = app.get(col_url) + text = resp.data.decode('utf-8') + assert "reply-" in text + + soup = BeautifulSoup(text, 'html.parser') + + # Download the reply and verify that it can be decrypted with the + # journalist's key as well as the source's reply key + filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value'] + checkbox_values = [ + soup.select('input[name="doc_names_selected"]')[1]['value']] + resp = app.post('/bulk', data=dict( + filesystem_id=filesystem_id, + action='download', + doc_names_selected=checkbox_values + ), follow_redirects=True) + assert resp.status_code == 200 + + zf = zipfile.ZipFile(BytesIO(resp.data), 'r') + data = zf.read(zf.namelist()[0]) + _can_decrypt_with_key(journalist_app, data) + _can_decrypt_with_key( + journalist_app, + data, + codename) + + # Test deleting reply on the journalist interface + last_reply_number = len( + soup.select('input[name="doc_names_selected"]')) - 1 + _helper_filenames_delete(app, soup, last_reply_number) + + with source_app.test_client() as app: + resp = app.post('/login', data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.get('/lookup') + assert resp.status_code == 200 + text = resp.data.decode('utf-8') - with self.journalist_app as journalist_app: - resp = journalist_app.get(col_url) - self.assertIn("reply-", resp.data) - - soup = BeautifulSoup(resp.data, 'html.parser') - - # Download the reply and verify that it can be decrypted with the - # journalist's key as well as the source's reply key - filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value'] - checkbox_values = [ - soup.select('input[name="doc_names_selected"]')[1]['value']] - resp = self.journalist_app.post('/bulk', data=dict( - filesystem_id=filesystem_id, - action='download', - doc_names_selected=checkbox_values - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - - zf = zipfile.ZipFile(StringIO(resp.data), 'r') - data = zf.read(zf.namelist()[0]) - self._can_decrypt_with_key(data, config.JOURNALIST_KEY) - self._can_decrypt_with_key(data, crypto_util.getkey(filesystem_id), - codename) - - # Test deleting reply on the journalist interface - last_reply_number = len( - soup.select('input[name="doc_names_selected"]')) - 1 - self.helper_filenames_delete(soup, last_reply_number) - - with self.source_app as source_app: - resp = source_app.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - resp = source_app.get('/lookup') - self.assertEqual(resp.status_code, 200) - - if not expected_success: - # there should be no reply - self.assertNotIn("You have received a reply.", resp.data) - else: - self.assertIn( - "You have received a reply. 
To protect your identity", - resp.data) - self.assertIn(test_reply, resp.data) - soup = BeautifulSoup(resp.data, 'html.parser') - msgid = soup.select( - 'form.message > input[name="reply_filename"]')[0]['value'] - resp = source_app.post('/delete', data=dict( - filesystem_id=filesystem_id, - reply_filename=msgid - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Reply deleted", resp.data) - - # Make sure the reply is deleted from the filesystem - utils.async.wait_for_assertion( - lambda: self.assertFalse(os.path.exists( - store.path(filesystem_id, msgid)))) - - source_app.get('/logout') - - @patch('source_app.main.async_genkey') - def test_delete_collection(self, async_genkey): - """Test the "delete collection" button on each collection page""" - # first, add a source - self.source_app.get('/generate') - self.source_app.post('/create') - resp = self.source_app.post('/submit', data=dict( + if not expected_success: + # there should be no reply + assert "You have received a reply." not in text + else: + assert ("You have received a reply. To protect your identity" + in text) + assert test_reply in text, text + soup = BeautifulSoup(text, 'html.parser') + msgid = soup.select( + 'form.message > input[name="reply_filename"]')[0]['value'] + resp = app.post('/delete', data=dict( + filesystem_id=filesystem_id, + reply_filename=msgid + ), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Reply deleted" in text + + app.get('/logout') + + +def _helper_filenames_delete(journalist_app, soup, i): + filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value'] + checkbox_values = [ + soup.select('input[name="doc_names_selected"]')[i]['value']] + + # delete + resp = journalist_app.post('/bulk', data=dict( + filesystem_id=filesystem_id, + action='confirm_delete', + doc_names_selected=checkbox_values + ), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert (("The following file has been selected for" + " <strong>permanent deletion</strong>") in text) + + # confirm delete + resp = journalist_app.post('/bulk', data=dict( + filesystem_id=filesystem_id, + action='delete', + doc_names_selected=checkbox_values + ), follow_redirects=True) + assert resp.status_code == 200 + assert "Submission deleted." in resp.data.decode('utf-8') + + # Make sure the files were deleted from the filesystem + def assertion(): + assert not any([os.path.exists(current_app.storage.path(filesystem_id, + doc_name)) + for doc_name in checkbox_values]) + utils.asynchronous.wait_for_assertion(assertion) + + +def _can_decrypt_with_key(journalist_app, msg, passphrase=None): + """ + Test that the given GPG message can be decrypted. + """ + + # For gpg 2.1+, a non null passphrase _must_ be passed to decrypt() + using_gpg_2_1 = StrictVersion( + journalist_app.crypto_util.gpg.binary_version) >= StrictVersion('2.1') + + if passphrase: + passphrase = journalist_app.crypto_util.hash_codename( + passphrase, + salt=journalist_app.crypto_util.scrypt_gpg_pepper) + elif using_gpg_2_1: + passphrase = 'dummy passphrase' + + decrypted_data = journalist_app.crypto_util.gpg.decrypt( + msg, passphrase=passphrase) + assert decrypted_data.ok, \ + "Could not decrypt msg with key, gpg says: {}" \ + .format(decrypted_data.stderr) + + +def test_reply_normal(journalist_app, + source_app, + test_journo, + config): + '''Test for regression on #1360 (failure to encode bytes before calling + gpg functions). 
+ ''' + journalist_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" + source_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" + _helper_test_reply(journalist_app, source_app, config, test_journo, + "This is a test reply.", True) + + +def test_unicode_reply_with_ansi_env(journalist_app, + source_app, + test_journo, + config): + # This makes python-gnupg handle encoding equivalent to if we were + # running SD in an environment where os.getenv("LANG") == "C". + # Unfortunately, with the way our test suite is set up simply setting + # that env var here will not have the desired effect. Instead we + # monkey-patch the GPG object that is called crypto_util to imitate the + # _encoding attribute it would have had it been initialized in a "C" + # environment. See + # https://github.com/freedomofpress/securedrop/issues/1360 for context. + journalist_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" + source_app.crypto_util.gpg._encoding = "ansi_x3.4_1968" + _helper_test_reply(journalist_app, source_app, config, test_journo, + "ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ", True) + + +def test_delete_collection(mocker, source_app, journalist_app, test_journo): + """Test the "delete collection" button on each collection page""" + async_genkey = mocker.patch('source_app.main.async_genkey') + + # first, add a source + with source_app.test_client() as app: + app.get('/generate') + app.post('/create') + resp = app.post('/submit', data=dict( msg="This is a test.", - fh=(StringIO(''), ''), + fh=(BytesIO(b''), ''), ), follow_redirects=True) + assert resp.status_code == 200 - assert resp.status_code == 200, resp.data.decode('utf-8') - - resp = self.journalist_app.get('/') + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') # navigate to the collection page - soup = BeautifulSoup(resp.data, 'html.parser') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') first_col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(first_col_url) - self.assertEqual(resp.status_code, 200) + resp = app.get(first_col_url) + assert resp.status_code == 200 # find the delete form and extract the post parameters - soup = BeautifulSoup(resp.data, 'html.parser') - delete_form_inputs = soup.select('form#delete-collection')[0]('input') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') + delete_form_inputs = soup.select( + 'form#delete-collection')[0]('input') filesystem_id = delete_form_inputs[1]['value'] col_name = delete_form_inputs[2]['value'] - resp = self.journalist_app.post('/col/delete/' + filesystem_id, - follow_redirects=True) - self.assertEquals(resp.status_code, 200) + resp = app.post('/col/delete/' + filesystem_id, + follow_redirects=True) + assert resp.status_code == 200 - self.assertIn(escape("%s's collection deleted" % (col_name,)), - resp.data) - self.assertIn("No documents have been submitted!", resp.data) - self.assertTrue(async_genkey.called) + text = resp.data.decode('utf-8') + assert escape("{}'s collection deleted".format(col_name)) in text + assert "No documents have been submitted!" 
in text + assert async_genkey.called # Make sure the collection is deleted from the filesystem - utils.async.wait_for_assertion( - lambda: self.assertFalse(os.path.exists(store.path(filesystem_id))) - ) - - @patch('source_app.main.async_genkey') - def test_delete_collections(self, async_genkey): - """Test the "delete selected" checkboxes on the index page that can be - used to delete multiple collections""" - # first, add some sources + def assertion(): + assert not os.path.exists(current_app.storage.path(filesystem_id)) + + utils.asynchronous.wait_for_assertion(assertion) + + +def test_delete_collections(mocker, journalist_app, source_app, test_journo): + """Test the "delete selected" checkboxes on the index page that can be + used to delete multiple collections""" + async_genkey = mocker.patch('source_app.main.async_genkey') + + # first, add some sources + with source_app.test_client() as app: num_sources = 2 for i in range(num_sources): - self.source_app.get('/generate') - self.source_app.post('/create') - self.source_app.post('/submit', data=dict( + app.get('/generate') + app.post('/create') + app.post('/submit', data=dict( msg="This is a test " + str(i) + ".", - fh=(StringIO(''), ''), + fh=(BytesIO(b''), ''), ), follow_redirects=True) - self.source_app.get('/logout') + app.get('/logout') - resp = self.journalist_app.get('/') + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') # get all the checkbox values - soup = BeautifulSoup(resp.data, 'html.parser') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') checkbox_values = [checkbox['value'] for checkbox in soup.select('input[name="cols_selected"]')] - resp = self.journalist_app.post('/col/process', data=dict( + resp = app.post('/col/process', data=dict( action='delete', cols_selected=checkbox_values ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("%s collections deleted" % (num_sources,), resp.data) - self.assertTrue(async_genkey.called) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "{} collections deleted".format(num_sources) in text + assert async_genkey.called # Make sure the collections are deleted from the filesystem - utils.async.wait_for_assertion(lambda: self.assertFalse( - any([os.path.exists(store.path(filesystem_id)) - for filesystem_id in checkbox_values]))) - - def test_filenames(self): - """Test pretty, sequential filenames when source uploads messages - and files""" - # add a source and submit stuff - self.source_app.get('/generate') - self.source_app.post('/create') - self.helper_filenames_submit() - - # navigate to the collection page - resp = self.journalist_app.get('/') - soup = BeautifulSoup(resp.data, 'html.parser') + def assertion(): + assert not ( + any([os.path.exists(current_app.storage.path(filesystem_id)) + for filesystem_id in checkbox_values])) + + utils.asynchronous.wait_for_assertion(assertion) + + +def _helper_filenames_submit(app): + app.post('/submit', data=dict( + msg="This is a test.", + fh=(BytesIO(b''), ''), + ), follow_redirects=True) + app.post('/submit', data=dict( + msg="This is a test.", + fh=(BytesIO(b'This is a test'), 'test.txt'), + ), follow_redirects=True) + app.post('/submit', data=dict( + msg="", + fh=(BytesIO(b'This is a test'), 'test.txt'), + ), follow_redirects=True) + + +def test_filenames(source_app, journalist_app, test_journo): + """Test pretty, sequential filenames when source uploads messages + and files""" + # add a source and submit stuff + with 
source_app.test_client() as app: + app.get('/generate') + app.post('/create') + _helper_filenames_submit(app) + + # navigate to the collection page + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') first_col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(first_col_url) - self.assertEqual(resp.status_code, 200) + resp = app.get(first_col_url) + assert resp.status_code == 200 # test filenames and sort order - soup = BeautifulSoup(resp.data, 'html.parser') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$' for i, submission_link in enumerate( soup.select('ul#submissions li a .filename')): filename = str(submission_link.contents[0]) - self.assertTrue(re.match(submission_filename_re.format(i + 1), - filename)) - - def test_filenames_delete(self): - """Test pretty, sequential filenames when journalist deletes files""" - # add a source and submit stuff - self.source_app.get('/generate') - self.source_app.post('/create') - self.helper_filenames_submit() - - # navigate to the collection page - resp = self.journalist_app.get('/') - soup = BeautifulSoup(resp.data, 'html.parser') + assert re.match(submission_filename_re.format(i + 1), filename) + + +def test_filenames_delete(journalist_app, source_app, test_journo): + """Test pretty, sequential filenames when journalist deletes files""" + # add a source and submit stuff + with source_app.test_client() as app: + app.get('/generate') + app.post('/create') + _helper_filenames_submit(app) + + # navigate to the collection page + with journalist_app.test_client() as app: + _login_user(app, test_journo) + resp = app.get('/') + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') first_col_url = soup.select('ul#cols > li a')[0]['href'] - resp = self.journalist_app.get(first_col_url) - self.assertEqual(resp.status_code, 200) - soup = BeautifulSoup(resp.data, 'html.parser') + resp = app.get(first_col_url) + assert resp.status_code == 200 + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') # delete file #2 - self.helper_filenames_delete(soup, 1) - resp = self.journalist_app.get(first_col_url) - soup = BeautifulSoup(resp.data, 'html.parser') + _helper_filenames_delete(app, soup, 1) + resp = app.get(first_col_url) + soup = BeautifulSoup(resp.data.decode('utf-8'), 'html.parser') # test filenames and sort order submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$' filename = str( soup.select('ul#submissions li a .filename')[0].contents[0]) - self.assertTrue(re.match(submission_filename_re.format(1), filename)) + assert re.match(submission_filename_re.format(1), filename) filename = str( soup.select('ul#submissions li a .filename')[1].contents[0]) - self.assertTrue(re.match(submission_filename_re.format(3), filename)) + assert re.match(submission_filename_re.format(3), filename) filename = str( soup.select('ul#submissions li a .filename')[2].contents[0]) - self.assertTrue(re.match(submission_filename_re.format(4), filename)) + assert re.match(submission_filename_re.format(4), filename) + - def test_user_change_password(self): - """Test that a journalist can successfully login after changing - their password""" +def test_user_change_password(journalist_app, test_journo): + """Test that a journalist can successfully login after changing + their password""" + with journalist_app.test_client() as app: + 
_login_user(app, test_journo) # change password new_pw = 'another correct horse battery staply long password' - self.journalist_app.post('/account/new-password', - data=dict(password=new_pw, - current_password=self.user_pw, - token='mocked')) - + assert new_pw != test_journo['password'] # precondition + app.post('/account/new-password', + data=dict(password=new_pw, + current_password=test_journo['password'], + token=TOTP(test_journo['otp_secret']).now())) # logout - self.journalist_app.get('/logout') + app.get('/logout') + # start a new client/context to be sure we've cleared the session + with journalist_app.test_client() as app: # login with new credentials should redirect to index page - resp = self.journalist_app.post('/login', data=dict( - username=self.user.username, - password=new_pw, - token='mocked', - follow_redirects=True)) - self.assertEqual(resp.status_code, 302) - - def test_login_after_regenerate_hotp(self): - """Test that journalists can login after resetting their HOTP 2fa""" - - # edit hotp - self.journalist_app.post('/account/reset-2fa-hotp', data=dict( - otp_secret=123456)) - - # successful verificaton should redirect to /account - resp = self.journalist_app.post('/account/2fa', data=dict( - token=self.user.hotp)) - self.assertEqual(resp.status_code, 302) + with InstrumentedApp(journalist_app) as ins: + resp = app.post('/login', data=dict( + username=test_journo['username'], + password=new_pw, + token=TOTP(test_journo['otp_secret']).now())) + ins.assert_redirects(resp, '/') + + +def test_login_after_regenerate_hotp(journalist_app, test_journo): + """Test that journalists can login after resetting their HOTP 2fa""" + + otp_secret = 'aaaaaa' + b32_otp_secret = b32encode(unhexlify(otp_secret)) + + # edit hotp + with journalist_app.test_client() as app: + _login_user(app, test_journo) + with InstrumentedApp(journalist_app) as ins: + resp = app.post('/account/reset-2fa-hotp', + data=dict(otp_secret=otp_secret)) + # valid otp secrets should redirect + ins.assert_redirects(resp, '/account/2fa') + + resp = app.post('/account/2fa', + data=dict(token=HOTP(b32_otp_secret).at(0))) + # successful verification should redirect to /account/account + ins.assert_redirects(resp, '/account/account') # log out - self.journalist_app.get('/logout') - - # login with new 2fa secret should redirect to index page - resp = self.journalist_app.post('/login', data=dict( - username=self.user.username, - password=self.user_pw, - token=self.user.hotp, - follow_redirects=True)) - self.assertEqual(resp.status_code, 302) - - def helper_filenames_submit(self): - self.source_app.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.source_app.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO('This is a test'), 'test.txt'), - ), follow_redirects=True) - self.source_app.post('/submit', data=dict( - msg="", - fh=(StringIO('This is a test'), 'test.txt'), - ), follow_redirects=True) - - def helper_filenames_delete(self, soup, i): - filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value'] - checkbox_values = [ - soup.select('input[name="doc_names_selected"]')[i]['value']] - - # delete - resp = self.journalist_app.post('/bulk', data=dict( - filesystem_id=filesystem_id, - action='confirm_delete', - doc_names_selected=checkbox_values - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn( - "The following file has been selected for" - " <strong>permanent deletion</strong>", - resp.data) - - #
-        # confirm delete
-        resp = self.journalist_app.post('/bulk', data=dict(
-            filesystem_id=filesystem_id,
-            action='delete',
-            doc_names_selected=checkbox_values
-        ), follow_redirects=True)
-        self.assertEqual(resp.status_code, 200)
-        self.assertIn("Submission deleted.", resp.data)
-
-        # Make sure the files were deleted from the filesystem
-        utils.async.wait_for_assertion(lambda: self.assertFalse(
-            any([os.path.exists(store.path(filesystem_id, doc_name))
-                 for doc_name in checkbox_values])))
+        app.get('/logout')
+
+    # start a new client/context to be sure we've cleared the session
+    with journalist_app.test_client() as app:
+        with InstrumentedApp(journalist_app) as ins:
+            # login with new 2fa secret should redirect to index page
+            resp = app.post('/login', data=dict(
+                username=test_journo['username'],
+                password=test_journo['password'],
+                token=HOTP(b32_otp_secret).at(1)))
+            ins.assert_redirects(resp, '/')
+
+
+def test_prevent_document_uploads(source_app, journalist_app, test_admin):
+    '''Test that the source interface accepts only messages when
+    allow_document_uploads == False.
+
+    '''
+
+    # Set allow_document_uploads = False:
+    with journalist_app.test_client() as app:
+        _login_user(app, test_admin)
+        form = journalist_app_module.forms.SubmissionPreferencesForm(
+            prevent_document_uploads=True)
+        resp = app.post('/admin/update-submission-preferences',
+                        data=form.data,
+                        follow_redirects=True)
+        assert resp.status_code == 200
+
+    # Check that the source interface accepts only messages:
+    with source_app.test_client() as app:
+        app.get('/generate')
+        resp = app.post('/create', follow_redirects=True)
+        assert resp.status_code == 200
+
+        text = resp.data.decode('utf-8')
+        soup = BeautifulSoup(text, 'html.parser')
+        assert 'Submit Messages' in text
+        assert len(soup.select('input[type="file"]')) == 0
+
+
+def test_no_prevent_document_uploads(source_app, journalist_app, test_admin):
+    '''Test that the source interface accepts both files and messages when
+    allow_document_uploads == True.
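+
+    A sketch of the flip side, mirroring test_prevent_document_uploads
+    above (same endpoint and form; the names below are the ones that
+    test uses, with the flag set instead of cleared):
+
+        form = journalist_app_module.forms.SubmissionPreferencesForm(
+            prevent_document_uploads=True)
+        app.post('/admin/update-submission-preferences',
+                 data=form.data, follow_redirects=True)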
+
+    '''
+
+    # Set allow_document_uploads = True:
+    with journalist_app.test_client() as app:
+        _login_user(app, test_admin)
+        resp = app.post('/admin/update-submission-preferences',
+                        follow_redirects=True)
+        assert resp.status_code == 200
+
+    # Check that the source interface accepts both files and messages:
+    with source_app.test_client() as app:
+        app.get('/generate')
+        resp = app.post('/create', follow_redirects=True)
+        assert resp.status_code == 200
+
+        text = resp.data.decode('utf-8')
+        soup = BeautifulSoup(text, 'html.parser')
+        assert 'Submit Files or Messages' in text
+        assert len(soup.select('input[type="file"]')) == 1
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -1,1345 +1,2156 @@
 # -*- coding: utf-8 -*-
-from cStringIO import StringIO
 import os
+import pytest
+import io
 import random
-import unittest
 import zipfile
+import base64
+import binascii

-from flask import url_for, escape, session
-from flask_testing import TestCase
+from base64 import b64decode
+from io import BytesIO
+from flask import url_for, escape, session, current_app, g
 from mock import patch
+from pyotp import TOTP
+from sqlalchemy.sql.expression import func
 from sqlalchemy.orm.exc import StaleDataError
 from sqlalchemy.exc import IntegrityError

-os.environ['SECUREDROP_ENV'] = 'test'  # noqa
-import config
 import crypto_util
-from db import (db_session, InvalidPasswordLength, Journalist, Reply, Source,
-                Submission)
-import db
-import journalist
-import journalist_app
-import journalist_app.utils
-import utils
+import models
+import journalist_app as journalist_app_module
+from . import utils
+
+os.environ['SECUREDROP_ENV'] = 'test'  # noqa
+from sdconfig import SDConfig, config
+
+from db import db
+from models import (InvalidPasswordLength, InstanceConfig, Journalist, Reply, Source,
+                    Submission)
+from .utils.instrument import InstrumentedApp

 # Smugly seed the RNG for deterministic testing
-random.seed('¯\_(ツ)_/¯')
+random.seed(r'¯\_(ツ)_/¯')

 VALID_PASSWORD = 'correct horse battery staple generic passphrase hooray'
 VALID_PASSWORD_2 = 'another correct horse battery staple generic passphrase'

+# These are factored out of the tests because some tests have a
+# positive/negative case under varying conditions, and we don't want
+# false positives after modifying a string in the application.
+EMPTY_REPLY_TEXT = "You cannot send an empty reply."
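+
+# A sketch of the live two-factor tokens these tests mint in place of the
+# old mocked verification. Both calls appear verbatim in the tests below;
+# 'otp_secret' and 'hex_secret' here are stand-in names for illustration:
+#
+#     from pyotp import TOTP, HOTP
+#     TOTP(otp_secret).now()                        # time-based login token
+#     HOTP(b32encode(unhexlify(hex_secret))).at(0)  # counter-based token
+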
+ADMIN_LINK = '<a href="/admin/" id="link-admin-index">' + + +def _login_user(app, username, password, otp_secret): + resp = app.post(url_for('main.login'), + data={'username': username, + 'password': password, + 'token': TOTP(otp_secret).now()}, + follow_redirects=True) + assert resp.status_code == 200 + assert hasattr(g, 'user') # ensure logged in + + +def test_user_with_whitespace_in_username_can_login(journalist_app): + # Create a user with whitespace at the end of the username + with journalist_app.app_context(): + username_with_whitespace = 'journalist ' + user, password = utils.db_helper.init_journalist(is_admin=False) + otp_secret = user.otp_secret + user.username = username_with_whitespace + db.session.add(user) + db.session.commit() + + # Verify that user is able to login successfully + with journalist_app.test_client() as app: + _login_user(app, username_with_whitespace, password, + otp_secret) + + +def test_make_password(journalist_app): + with patch.object(crypto_util.CryptoUtil, 'genrandomid', + side_effect=['bad', VALID_PASSWORD]): + fake_config = SDConfig() + with journalist_app.test_request_context('/'): + password = journalist_app_module.utils.make_password(fake_config) + assert password == VALID_PASSWORD + + +def test_reply_error_logging(journalist_app, test_journo, test_source): + exception_class = StaleDataError + exception_msg = 'Potentially sensitive content!' + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], + test_journo['password'], test_journo['otp_secret']) + with patch.object(journalist_app.logger, 'error') \ + as mocked_error_logger: + with patch.object(db.session, 'commit', + side_effect=exception_class(exception_msg)): + resp = app.post( + url_for('main.reply'), + data={'filesystem_id': test_source['filesystem_id'], + 'message': '_'}, + follow_redirects=True) + assert resp.status_code == 200 + + # Notice the "potentially sensitive" exception_msg is not present in + # the log event. + mocked_error_logger.assert_called_once_with( + "Reply from '{}' (ID {}) failed: {}!".format( + test_journo['username'], + test_journo['id'], + exception_class)) + + +def test_reply_error_flashed_message(journalist_app, test_journo, test_source): + exception_class = StaleDataError + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], + test_journo['password'], test_journo['otp_secret']) + + with InstrumentedApp(app) as ins: + with patch.object(db.session, 'commit', + side_effect=exception_class()): + app.post(url_for('main.reply'), + data={'filesystem_id': test_source['filesystem_id'], + 'message': '_'}) + + ins.assert_message_flashed( + 'An unexpected error occurred! 
Please ' + 'inform your admin.', 'error') + + +def test_empty_replies_are_rejected(journalist_app, test_journo, test_source): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], + test_journo['password'], test_journo['otp_secret']) + resp = app.post(url_for('main.reply'), + data={'filesystem_id': test_source['filesystem_id'], + 'message': ''}, + follow_redirects=True) -class TestJournalistApp(TestCase): + text = resp.data.decode('utf-8') + assert EMPTY_REPLY_TEXT in text - # A method required by flask_testing.TestCase - def create_app(self): - return journalist.app - def setUp(self): - utils.env.setup() +def test_nonempty_replies_are_accepted(journalist_app, test_journo, + test_source): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], + test_journo['password'], test_journo['otp_secret']) + resp = app.post(url_for('main.reply'), + data={'filesystem_id': test_source['filesystem_id'], + 'message': '_'}, + follow_redirects=True) - # Patch the two-factor verification to avoid intermittent errors - utils.db_helper.mock_verify_token(self) + text = resp.data.decode('utf-8') + assert EMPTY_REPLY_TEXT not in text - # Setup test users: user & admin - self.user, self.user_pw = utils.db_helper.init_journalist() - self.admin, self.admin_pw = utils.db_helper.init_journalist( - is_admin=True) - def tearDown(self): - utils.env.teardown() +def test_unauthorized_access_redirects_to_login(journalist_app): + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + resp = app.get(url_for('main.index')) + ins.assert_redirects(resp, url_for('main.login')) - @patch('crypto_util.genrandomid', side_effect=['bad', VALID_PASSWORD]) - def test_make_password(self, mocked_pw_gen): - class fake_config: - pass - assert (journalist_app.utils.make_password(fake_config) == - VALID_PASSWORD) - @patch('journalist.app.logger.error') - def test_reply_error_logging(self, mocked_error_logger): - source, _ = utils.db_helper.init_source() - filesystem_id = source.filesystem_id - self._login_user() +def test_login_throttle(journalist_app, test_journo): + # Overwrite the default value used during testing + original_hardening = models.LOGIN_HARDENING + models.LOGIN_HARDENING = True + try: + with journalist_app.test_client() as app: + for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD): + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password='invalid', + token='invalid')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password='invalid', + token='invalid')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert ("Please wait at least {} seconds".format( + Journalist._LOGIN_ATTEMPT_PERIOD) in text) + finally: + models.LOGIN_HARDENING = original_hardening + + +def test_login_throttle_is_not_global(journalist_app, test_journo, test_admin): + """The login throttling should be per-user, not global. 
Global login + throttling can prevent all users logging into the application.""" + + original_hardening = models.LOGIN_HARDENING + # Overwrite the default value used during testing + # Note that this may break other tests if doing parallel testing + models.LOGIN_HARDENING = True + try: + with journalist_app.test_client() as app: + for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD): + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password='invalid', + token='invalid')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password='invalid', + token='invalid')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert ("Please wait at least {} seconds".format( + Journalist._LOGIN_ATTEMPT_PERIOD) in text) + + # A different user should be able to login + resp = app.post( + url_for('main.login'), + data=dict(username=test_admin['username'], + password=test_admin['password'], + token=TOTP(test_admin['otp_secret']).now()), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Sources" in text + finally: + models.LOGIN_HARDENING = original_hardening + + +def test_login_invalid_credentials(journalist_app, test_journo): + with journalist_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(username=test_journo['username'], + password='invalid', + token='mocked')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + + +def test_validate_redirect(journalist_app): + with journalist_app.test_client() as app: + resp = app.post(url_for('main.index'), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login to access" in text - exception_class = StaleDataError - exception_msg = 'Potentially sensitive content!' - with patch('db.db_session.commit', - side_effect=exception_class(exception_msg)): - self.client.post(url_for('main.reply'), - data={'filesystem_id': filesystem_id, - 'message': '_'}) +def test_login_valid_credentials(journalist_app, test_journo): + with journalist_app.test_client() as app: + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password=test_journo['password'], + token=TOTP(test_journo['otp_secret']).now()), + follow_redirects=True) + assert resp.status_code == 200 # successful login redirects to index + text = resp.data.decode('utf-8') + assert "Sources" in text + assert "No documents have been submitted!" 
in text + + +def test_admin_login_redirects_to_index(journalist_app, test_admin): + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + resp = app.post( + url_for('main.login'), + data=dict(username=test_admin['username'], + password=test_admin['password'], + token=TOTP(test_admin['otp_secret']).now()), + follow_redirects=False) + ins.assert_redirects(resp, url_for('main.index')) + + +def test_user_login_redirects_to_index(journalist_app, test_journo): + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password=test_journo['password'], + token=TOTP(test_journo['otp_secret']).now()), + follow_redirects=False) + ins.assert_redirects(resp, url_for('main.index')) + + +def test_admin_has_link_to_edit_account_page_in_index_page(journalist_app, + test_admin): + with journalist_app.test_client() as app: + resp = app.post( + url_for('main.login'), + data=dict(username=test_admin['username'], + password=test_admin['password'], + token=TOTP(test_admin['otp_secret']).now()), + follow_redirects=True) + edit_account_link = ('<a href="/account/account" ' + 'id="link-edit-account">') + text = resp.data.decode('utf-8') + assert edit_account_link in text + + +def test_user_has_link_to_edit_account_page_in_index_page(journalist_app, + test_journo): + with journalist_app.test_client() as app: + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password=test_journo['password'], + token=TOTP(test_journo['otp_secret']).now()), + follow_redirects=True) + edit_account_link = ('<a href="/account/account" ' + 'id="link-edit-account">') + text = resp.data.decode('utf-8') + assert edit_account_link in text + + +def test_admin_has_link_to_admin_index_page_in_index_page(journalist_app, + test_admin): + with journalist_app.test_client() as app: + resp = app.post( + url_for('main.login'), + data=dict(username=test_admin['username'], + password=test_admin['password'], + token=TOTP(test_admin['otp_secret']).now()), + follow_redirects=True) + text = resp.data.decode('utf-8') + assert ADMIN_LINK in text + + +def test_user_lacks_link_to_admin_index_page_in_index_page(journalist_app, + test_journo): + with journalist_app.test_client() as app: + resp = app.post( + url_for('main.login'), + data=dict(username=test_journo['username'], + password=test_journo['password'], + token=TOTP(test_journo['otp_secret']).now()), + follow_redirects=True) + text = resp.data.decode('utf-8') + assert ADMIN_LINK not in text + + +def test_admin_logout_redirects_to_index(journalist_app, test_admin): + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + _login_user(app, test_admin['username'], + test_admin['password'], + test_admin['otp_secret']) + resp = app.get(url_for('main.logout')) + ins.assert_redirects(resp, url_for('main.index')) + + +def test_user_logout_redirects_to_index(journalist_app, test_journo): + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + _login_user(app, test_journo['username'], + test_journo['password'], + test_journo['otp_secret']) + resp = app.get(url_for('main.logout')) + ins.assert_redirects(resp, url_for('main.index')) + + +def test_admin_index(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = 
app.get(url_for('admin.index')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Admin Interface" in text - # Notice the "potentially sensitive" exception_msg is not present in - # the log event. - mocked_error_logger.assert_called_once_with( - "Reply from '{}' (ID {}) failed: {}!".format(self.user.username, - self.user.id, - exception_class)) - def test_reply_error_flashed_message(self): - source, _ = utils.db_helper.init_source() - filesystem_id = source.filesystem_id - self._login_user() +def test_admin_delete_user(journalist_app, test_admin, test_journo): + # Verify journalist is in the database + with journalist_app.app_context(): + assert Journalist.query.get(test_journo['id']) is not None - exception_class = StaleDataError + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.post(url_for('admin.delete_user', + user_id=test_journo['id']), + follow_redirects=True) - with patch('db.db_session.commit', side_effect=exception_class()): - self.client.post(url_for('main.reply'), - data={'filesystem_id': filesystem_id, - 'message': '_'}) + # Assert correct interface behavior + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert escape("Deleted user '{}'".format(test_journo['username'])) \ + in text - self.assertMessageFlashed( - 'An unexpected error occurred! Please ' - 'inform your administrator.', 'error') + # Verify journalist is no longer in the database + with journalist_app.app_context(): + assert Journalist.query.get(test_journo['id']) is None - def test_empty_replies_are_rejected(self): - source, _ = utils.db_helper.init_source() - filesystem_id = source.filesystem_id - self._login_user() - resp = self.client.post(url_for('main.reply'), - data={'filesystem_id': filesystem_id, - 'message': ''}, - follow_redirects=True) +def test_admin_cannot_delete_self(journalist_app, test_admin, test_journo): + # Verify journalist is in the database + with journalist_app.app_context(): + assert Journalist.query.get(test_journo['id']) is not None - self.assertIn("You cannot send an empty reply.", resp.data) + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.post(url_for('admin.delete_user', + user_id=test_admin['id']), + follow_redirects=True) - def test_nonempty_replies_are_accepted(self): - source, _ = utils.db_helper.init_source() - filesystem_id = source.filesystem_id - self._login_user() + # Assert correct interface behavior + assert resp.status_code == 403 - resp = self.client.post(url_for('main.reply'), - data={'filesystem_id': filesystem_id, - 'message': '_'}, - follow_redirects=True) + resp = app.get(url_for('admin.index'), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Admin Interface" in text + + # The user can be edited and deleted + assert escape("Edit user {}".format(test_journo['username'])) in text + assert escape("Delete user {}".format(test_journo['username'])) in text + # The admin can be edited but cannot deleted + assert escape("Edit user {}".format(test_admin['username'])) in text + assert escape("Delete user {}".format(test_admin['username'])) \ + not in text + + +def test_admin_edits_user_password_success_response(journalist_app, + test_admin, + test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + 
test_admin['otp_secret']) + + resp = app.post(url_for('admin.new_password', + user_id=test_journo['id']), + data=dict(password=VALID_PASSWORD_2), + follow_redirects=True) + assert resp.status_code == 200 - self.assertNotIn("You cannot send an empty reply.", resp.data) + text = resp.data.decode('utf-8') + assert 'Password updated.' in text + assert VALID_PASSWORD_2 in text - def test_unauthorized_access_redirects_to_login(self): - resp = self.client.get(url_for('main.index')) - self.assertRedirects(resp, url_for('main.login')) - def test_login_throttle(self): - db.LOGIN_HARDENING = True - try: - for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password='invalid', - token='mocked')) - self.assert200(resp) - self.assertIn("Login failed", resp.data) - - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password='invalid', - token='mocked')) - self.assert200(resp) - self.assertIn("Please wait at least {} seconds".format( - Journalist._LOGIN_ATTEMPT_PERIOD), resp.data) - finally: - db.LOGIN_HARDENING = False - - def test_login_invalid_credentials(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password='invalid', - token='mocked')) - self.assert200(resp) - self.assertIn("Login failed", resp.data) - - def test_validate_redirect(self): - resp = self.client.post(url_for('main.index'), - follow_redirects=True) - self.assert200(resp) - self.assertIn("Login to access", resp.data) - - def test_login_valid_credentials(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked'), - follow_redirects=True) - self.assert200(resp) # successful login redirects to index - self.assertIn("Sources", resp.data) - self.assertIn("No documents have been submitted!", resp.data) - - def test_admin_login_redirects_to_index(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.admin.username, - password=self.admin_pw, - token='mocked')) - self.assertRedirects(resp, url_for('main.index')) - - def test_user_login_redirects_to_index(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked')) - self.assertRedirects(resp, url_for('main.index')) - - def test_admin_has_link_to_edit_account_page_in_index_page(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.admin.username, - password=self.admin_pw, - token='mocked'), - follow_redirects=True) - edit_account_link = '<a href="{}" id="link-edit-account">'.format( - url_for('account.edit')) - self.assertIn(edit_account_link, resp.data) - - def test_user_has_link_to_edit_account_page_in_index_page(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked'), - follow_redirects=True) - edit_account_link = '<a href="{}" id="link-edit-account">'.format( - url_for('account.edit')) - self.assertIn(edit_account_link, resp.data) - - def test_admin_has_link_to_admin_index_page_in_index_page(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.admin.username, - password=self.admin_pw, - token='mocked'), - follow_redirects=True) - admin_link = '<a href="{}" id="link-admin-index">'.format( - url_for('admin.index')) - self.assertIn(admin_link, resp.data) - - def 
test_user_lacks_link_to_admin_index_page_in_index_page(self): - resp = self.client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked'), - follow_redirects=True) - admin_link = '<a href="{}" id="link-admin-index">'.format( - url_for('admin.index')) - self.assertNotIn(admin_link, resp.data) - - # WARNING: we are purposely doing something that would not work in - # production in the _login_user and _login_admin methods. This is done as a - # reminder to the test developer that the flask_testing.TestCase only uses - # one request context per method (see - # https://github.com/freedomofpress/securedrop/issues/1444). By explicitly - # making a point of this, we hope to avoid the introduction of new tests, - # that do not truly prove their result because of this disconnect between - # request context in Flask Testing and production. - # - # TODO: either ditch Flask Testing or subclass it as discussed in the - # aforementioned issue to fix the described problem. - def _login_admin(self): - self._ctx.g.user = self.admin - - def _login_user(self): - self._ctx.g.user = self.user - - def test_admin_logout_redirects_to_index(self): - self._login_admin() - resp = self.client.get(url_for('main.logout')) - self.assertRedirects(resp, url_for('main.index')) - - def test_user_logout_redirects_to_index(self): - self._login_user() - resp = self.client.get(url_for('main.logout')) - self.assertRedirects(resp, url_for('main.index')) - - def test_admin_index(self): - self._login_admin() - resp = self.client.get(url_for('admin.index')) - self.assert200(resp) - self.assertIn("Admin Interface", resp.data) - - def test_admin_delete_user(self): - # Verify journalist is in the database - self.assertNotEqual(Journalist.query.get(self.user.id), None) - - self._login_admin() - resp = self.client.post(url_for('admin.delete_user', - user_id=self.user.id), - follow_redirects=True) +def test_admin_edits_user_password_session_invalidate(journalist_app, + test_admin, + test_journo): + # Start the journalist session. + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) - # Assert correct interface behavior - self.assert200(resp) - self.assertIn(escape("Deleted user '{}'".format(self.user.username)), - resp.data) - # Verify journalist is no longer in the database - self.assertEqual(Journalist.query.get(self.user.id), None) - - def test_admin_deletes_invalid_user_404(self): - self._login_admin() - invalid_user_pk = max([user.id for user in Journalist.query.all()]) + 1 - resp = self.client.post(url_for('admin.delete_user', - user_id=invalid_user_pk)) - self.assert404(resp) - - def test_admin_edits_user_password_success_response(self): - self._login_admin() - - resp = self.client.post( - url_for('admin.new_password', user_id=self.user.id), - data=dict(password=VALID_PASSWORD_2), - follow_redirects=True) + # Change the journalist password via an admin session. + with journalist_app.test_client() as admin_app: + _login_user(admin_app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = admin_app.post(url_for('admin.new_password', + user_id=test_journo['id']), + data=dict(password=VALID_PASSWORD_2), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert 'Password updated.' in text + assert VALID_PASSWORD_2 in text + + # Now verify the password change error is flashed. 
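+        # (Aside, a compact restatement of the session check further below;
+        # 'expires' and 'csrf_token' are the keys this test assumes Flask
+        # and Flask-WTF may legitimately leave behind:
+        #     leftover = set(session.keys()) - {'expires', 'csrf_token'}
+        #     assert not leftover
+        # which is what the session.pop(...) calls plus the final assert do.)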
+ resp = app.get(url_for('main.index'), follow_redirects=True) text = resp.data.decode('utf-8') - assert 'Password updated.' in text - assert VALID_PASSWORD_2 in text + assert 'You have been logged out due to password change' in text - def test_admin_edits_user_password_error_response(self): - self._login_admin() + # Also ensure that session is now invalid. + session.pop('expires', None) + session.pop('csrf_token', None) + assert not session, session - with patch('db.db_session.commit', side_effect=Exception()): - resp = self.client.post( - url_for('admin.new_password', user_id=self.user.id), - data=dict(password=VALID_PASSWORD_2), - follow_redirects=True) - assert ('There was an error, and the new password might not have ' - 'been saved correctly.') in resp.data.decode('utf-8') - - def test_user_edits_password_success_response(self): - self._login_user() - resp = self.client.post( - url_for('account.new_password'), - data=dict(current_password=self.user_pw, - token='mocked', - password=VALID_PASSWORD_2), - follow_redirects=True) +def test_admin_deletes_invalid_user_404(journalist_app, test_admin): + with journalist_app.app_context(): + invalid_id = db.session.query(func.max(Journalist.id)).scalar() + 1 - text = resp.data.decode('utf-8') - assert "Password updated." in text - assert VALID_PASSWORD_2 in text + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.post(url_for('admin.delete_user', user_id=invalid_id)) + assert resp.status_code == 404 + + +def test_admin_edits_user_password_error_response(journalist_app, + test_admin, + test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + with patch('sqlalchemy.orm.scoping.scoped_session.commit', + side_effect=Exception()): + resp = app.post(url_for('admin.new_password', + user_id=test_journo['id']), + data=dict(password=VALID_PASSWORD_2), + follow_redirects=True) - def test_user_edits_password_expires_session(self): - with self.client as client: - # do a real login to get a real session - # (none of the mocking `g` hacks) - resp = client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked')) - self.assertRedirects(resp, url_for('main.index')) + text = resp.data.decode('utf-8') + assert ('There was an error, and the new password might not have ' + 'been saved correctly.') in text + + +def test_user_edits_password_success_response(journalist_app, test_journo): + original_hardening = models.LOGIN_HARDENING + try: + # Set this to false because we login then immediately reuse the same + # token when authenticating to change the password. This triggers login + # hardening measures. + models.LOGIN_HARDENING = False + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + token = TOTP(test_journo['otp_secret']).now() + resp = app.post(url_for('account.new_password'), + data=dict(current_password=test_journo['password'], + token=token, + password=VALID_PASSWORD_2), + follow_redirects=True) + + text = resp.data.decode('utf-8') + assert "Password updated." 
in text + assert VALID_PASSWORD_2 in text + finally: + models.LOGIN_HARDENING = original_hardening + + +def test_user_edits_password_expires_session(journalist_app, test_journo): + original_hardening = models.LOGIN_HARDENING + try: + # Set this to false because we login then immediately reuse the same + # token when authenticating to change the password. This triggers login + # hardening measures. + models.LOGIN_HARDENING = False + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) assert 'uid' in session - resp = client.post( - url_for('account.new_password'), - data=dict(current_password=self.user_pw, - token='mocked', - password=VALID_PASSWORD_2)) + with InstrumentedApp(journalist_app) as ins: + token = TOTP(test_journo['otp_secret']).now() + resp = app.post(url_for('account.new_password'), + data=dict( + current_password=test_journo['password'], + token=token, + password=VALID_PASSWORD_2)) + + ins.assert_redirects(resp, url_for('main.login')) - self.assertRedirects(resp, url_for('main.login')) # verify the session was expired after the password was changed assert 'uid' not in session + finally: + models.LOGIN_HARDENING = original_hardening + + +def test_user_edits_password_error_reponse(journalist_app, test_journo): + original_hardening = models.LOGIN_HARDENING + try: + # Set this to false because we login then immediately reuse the same + # token when authenticating to change the password. This triggers login + # hardening measures. + models.LOGIN_HARDENING = False + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + # patch token verification because there are multiple commits + # to the database and this isolates the one we want to fail + with patch.object(Journalist, 'verify_token', return_value=True): + with patch.object(db.session, 'commit', + side_effect=Exception()): + resp = app.post( + url_for('account.new_password'), + data=dict(current_password=test_journo['password'], + token='mocked', + password=VALID_PASSWORD_2), + follow_redirects=True) + + text = resp.data.decode('utf-8') + assert ('There was an error, and the new password might not have ' + 'been saved correctly.') in text + finally: + models.LOGIN_HARDENING = original_hardening + + +def test_admin_add_user_when_username_already_taken(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + resp = app.post(url_for('admin.add_user'), + data=dict(username=test_admin['username'], + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + text = resp.data.decode('utf-8') + assert 'already taken' in text + + +def test_max_password_length(): + """Creating a Journalist with a password that is greater than the + maximum password length should raise an exception""" + overly_long_password = VALID_PASSWORD + \ + 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) + with pytest.raises(InvalidPasswordLength): + Journalist(username="My Password is Too Big!", + password=overly_long_password) + + +def test_min_password_length(): + """Creating a Journalist with a password that is smaller than the + minimum password length should raise an exception. This uses the + magic number 7 below to get around the "diceware-like" requirement + that may cause a failure before the length check. 
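+
+    As a worked example (assuming Journalist.MIN_PASSWORD_LEN == 14; the
+    real constant lives on the model): 'a ' * 7 is the 14-character string
+    'a a a a a a a ', and slicing it to [0:13] yields 'a a a a a a a',
+    which still reads as seven diceware-like words but is one character
+    too short.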
+ """ + password = ('a ' * 7)[0:(Journalist.MIN_PASSWORD_LEN - 1)] + with pytest.raises(InvalidPasswordLength): + Journalist(username="My Password is Too Small!", + password=password) + + +def test_admin_edits_user_password_too_long_warning(journalist_app, + test_admin, + test_journo): + # append a bunch of a's to a diceware password to keep it "diceware-like" + overly_long_password = VALID_PASSWORD + \ + 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + with InstrumentedApp(journalist_app) as ins: + app.post( + url_for('admin.new_password', user_id=test_journo['id']), + data=dict(username=test_journo['username'], + first_name='', + last_name='', + is_admin=None, + password=overly_long_password), + follow_redirects=True) + + ins.assert_message_flashed('You submitted a bad password! ' + 'Password not changed.', 'error') + - def test_user_edits_password_error_reponse(self): - self._login_user() +def test_user_edits_password_too_long_warning(journalist_app, test_journo): + overly_long_password = VALID_PASSWORD + \ + 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) - with patch('db.db_session.commit', side_effect=Exception()): - resp = self.client.post( + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post( url_for('account.new_password'), - data=dict(current_password=self.user_pw, - token='mocked', - password=VALID_PASSWORD_2), + data=dict(username=test_journo['username'], + first_name='', + last_name='', + is_admin=None, + token=TOTP(test_journo['otp_secret']).now(), + current_password=test_journo['password'], + password=overly_long_password), follow_redirects=True) - assert ('There was an error, and the new password might not have ' - 'been saved correctly.') in resp.data.decode('utf-8') - - def test_admin_add_user_when_username_already_in_use(self): - self._login_admin() - resp = self.client.post(url_for('admin.add_user'), - data=dict(username=self.admin.username, - password=VALID_PASSWORD, - is_admin=None)) - self.assertIn('That username is already in use', resp.data) - - def test_max_password_length(self): - """Creating a Journalist with a password that is greater than the - maximum password length should raise an exception""" - overly_long_password = VALID_PASSWORD + \ - 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) - with self.assertRaises(InvalidPasswordLength): - Journalist(username="My Password is Too Big!", - password=overly_long_password) - - def test_min_password_length(self): - """Creating a Journalist with a password that is smaller than the - minimum password length should raise an exception. This uses the - magic number 7 below to get around the "diceware-like" requirement - that may cause a failure before the length check. 
- """ - password = ('a ' * 7)[0:(Journalist.MIN_PASSWORD_LEN - 1)] - with self.assertRaises(InvalidPasswordLength): - Journalist(username="My Password is Too Small!", - password=password) - - def test_admin_edits_user_password_too_long_warning(self): - self._login_admin() - overly_long_password = VALID_PASSWORD + \ - 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) - - self.client.post( - url_for('admin.new_password', user_id=self.user.id), - data=dict(username=self.user.username, is_admin=None, - password=overly_long_password), - follow_redirects=True) + ins.assert_message_flashed('You submitted a bad password! ' + 'Password not changed.', 'error') + + +def test_admin_add_user_password_too_long_warning(journalist_app, test_admin): + overly_long_password = VALID_PASSWORD + \ + 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post( + url_for('admin.add_user'), + data=dict(username='dellsberg', + first_name='', + last_name='', + password=overly_long_password, + is_admin=None)) + + ins.assert_message_flashed( + 'There was an error with the autogenerated password. User not ' + 'created. Please try again.', 'error') + + +def test_admin_add_user_first_name_too_long_warning(journalist_app, test_admin): + with journalist_app.test_client() as app: + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + resp = app.post(url_for('admin.add_user'), + data=dict(username=test_admin['username'], + first_name=overly_long_name, + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + text = resp.data.decode('utf-8') + assert 'Field can not be more than' in text + + +def test_admin_add_user_last_name_too_long_warning(journalist_app, test_admin): + with journalist_app.test_client() as app: + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + resp = app.post(url_for('admin.add_user'), + data=dict(username=test_admin['username'], + first_name='', + last_name=overly_long_name, + password=VALID_PASSWORD, + is_admin=None)) + text = resp.data.decode('utf-8') + assert 'Field can not be more than' in text - self.assertMessageFlashed('You submitted a bad password! 
' - 'Password not changed.', 'error') - def test_user_edits_password_too_long_warning(self): - self._login_user() - overly_long_password = VALID_PASSWORD + \ - 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) +def test_admin_edits_user_invalid_username( + journalist_app, test_admin, test_journo): + """Test expected error message when admin attempts to change a user's + username to a username that is taken by another user.""" + new_username = test_journo['username'] + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) - self.client.post(url_for('account.new_password'), - data=dict(password=overly_long_password, - token='mocked', - current_password=self.user_pw), - follow_redirects=True) + with InstrumentedApp(journalist_app) as ins: + app.post( + url_for('admin.edit_user', user_id=test_admin['id']), + data=dict(username=new_username, + first_name='', + last_name='', + is_admin=None)) + + ins.assert_message_flashed( + 'Username "{}" already taken.'.format(new_username), + 'error') + + +def test_admin_resets_user_hotp_format_non_hexa( + journalist_app, test_admin, test_journo): + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + journo = test_journo['journalist'] + # guard to ensure check below tests the correct condition + assert journo.is_totp + + old_secret = journo.otp_secret + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.reset_two_factor_hotp'), + data=dict(uid=test_journo['id'], otp_secret='ZZ')) + + # fetch altered DB object + journo = Journalist.query.get(journo.id) + + new_secret = journo.otp_secret + assert old_secret == new_secret + + # ensure we didn't accidentally enable hotp + assert journo.is_totp + + ins.assert_message_flashed( + "Invalid secret format: please only submit letters A-F and " + "numbers 0-9.", "error") + + +def test_admin_resets_user_hotp(journalist_app, test_admin, test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + journo = test_journo['journalist'] + old_secret = journo.otp_secret + + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('admin.reset_two_factor_hotp'), + data=dict(uid=test_journo['id'], + otp_secret=123456)) + + # fetch altered DB object + journo = Journalist.query.get(journo.id) + + new_secret = journo.otp_secret + assert old_secret != new_secret + assert not journo.is_totp + + # Redirect to admin 2FA view + ins.assert_redirects(resp, url_for('admin.new_user_two_factor', + uid=journo.id)) + + +def test_admin_resets_user_hotp_format_odd(journalist_app, + test_admin, + test_journo): + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.reset_two_factor_hotp'), + data=dict(uid=test_journo['id'], otp_secret='Z')) + + ins.assert_message_flashed( + "Invalid secret format: " + "odd-length secret. 
Did you mistype the secret?", "error") + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert old_secret == new_secret + + +def test_admin_resets_user_hotp_error(mocker, + journalist_app, + test_admin, + test_journo): + + bad_secret = '1234' + error_message = 'SOMETHING WRONG!' + mocked_error_logger = mocker.patch('journalist.app.logger.error') + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + mocker.patch('models.Journalist.set_hotp_secret', + side_effect=binascii.Error(error_message)) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.reset_two_factor_hotp'), + data=dict(uid=test_journo['id'], otp_secret=bad_secret)) + ins.assert_message_flashed("An unexpected error occurred! " + "Please inform your admin.", + "error") + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert new_secret == old_secret + + mocked_error_logger.assert_called_once_with( + "set_hotp_secret '{}' (id {}) failed: {}".format( + bad_secret, test_journo['id'], error_message)) + + +def test_user_resets_hotp(journalist_app, test_journo): + old_secret = test_journo['otp_secret'] + new_secret = 123456 + + # Precondition + assert new_secret != old_secret + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('account.reset_two_factor_hotp'), + data=dict(otp_secret=new_secret)) + # should redirect to verification page + ins.assert_redirects(resp, url_for('account.new_two_factor')) + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert old_secret != new_secret + + +def test_user_resets_user_hotp_format_odd(journalist_app, test_journo): + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('account.reset_two_factor_hotp'), + data=dict(otp_secret='123')) + ins.assert_message_flashed( + "Invalid secret format: " + "odd-length secret. 
Did you mistype the secret?", "error") + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert old_secret == new_secret + + +def test_user_resets_user_hotp_format_non_hexa(journalist_app, test_journo): + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('account.reset_two_factor_hotp'), + data=dict(otp_secret='ZZ')) + ins.assert_message_flashed( + "Invalid secret format: " + "please only submit letters A-F and numbers 0-9.", "error") + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert old_secret == new_secret + + +def test_user_resets_user_hotp_error(mocker, + journalist_app, + test_journo): + bad_secret = '1234' + old_secret = test_journo['otp_secret'] + error_message = 'SOMETHING WRONG!' + mocked_error_logger = mocker.patch('journalist.app.logger.error') + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + mocker.patch('models.Journalist.set_hotp_secret', + side_effect=binascii.Error(error_message)) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('account.reset_two_factor_hotp'), + data=dict(otp_secret=bad_secret)) + ins.assert_message_flashed( + "An unexpected error occurred! Please inform your " + "admin.", "error") + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert old_secret == new_secret + mocked_error_logger.assert_called_once_with( + "set_hotp_secret '{}' (id {}) failed: {}".format( + bad_secret, test_journo['id'], error_message)) + + +def test_admin_resets_user_totp(journalist_app, test_admin, test_journo): + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + resp = app.post( + url_for('admin.reset_two_factor_totp'), + data=dict(uid=test_journo['id'])) + ins.assert_redirects( + resp, + url_for('admin.new_user_two_factor', uid=test_journo['id'])) + + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert new_secret != old_secret + + +def test_user_resets_totp(journalist_app, test_journo): + old_secret = test_journo['otp_secret'] + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('account.reset_two_factor_totp')) + # should redirect to verification page + ins.assert_redirects(resp, url_for('account.new_two_factor')) - self.assertMessageFlashed('You submitted a bad password! 
' - 'Password not changed.', 'error') - - def test_admin_add_user_password_too_long_warning(self): - self._login_admin() - - overly_long_password = VALID_PASSWORD + \ - 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) - self.client.post( - url_for('admin.add_user'), - data=dict(username='dellsberg', - password=overly_long_password, - is_admin=None)) - - self.assertMessageFlashed('There was an error with the autogenerated ' - 'password. User not created. ' - 'Please try again.', 'error') - - def test_admin_edits_user_invalid_username(self): - """Test expected error message when admin attempts to change a user's - username to a username that is taken by another user.""" - self._login_admin() - new_username = self.admin.username - - self.client.post( - url_for('admin.edit_user', user_id=self.user.id), - data=dict(username=new_username, is_admin=None)) - - self.assertMessageFlashed('Username "{}" already taken.'.format( - new_username), 'error') - - def test_admin_resets_user_hotp(self): - self._login_admin() - old_hotp = self.user.hotp - - resp = self.client.post(url_for('admin.reset_two_factor_hotp'), - data=dict(uid=self.user.id, otp_secret=123456)) - new_hotp = self.user.hotp - - # check that hotp is different - self.assertNotEqual(old_hotp.secret, new_hotp.secret) - # Redirect to admin 2FA view - self.assertRedirects( - resp, - url_for('admin.new_user_two_factor', uid=self.user.id)) - - def test_admin_resets_user_hotp_format_non_hexa(self): - self._login_admin() - old_hotp = self.user.hotp.secret - - self.client.post(url_for('admin.reset_two_factor_hotp'), - data=dict(uid=self.user.id, otp_secret='ZZ')) - new_hotp = self.user.hotp.secret - - self.assertEqual(old_hotp, new_hotp) - self.assertMessageFlashed( - "Invalid secret format: " - "please only submit letters A-F and numbers 0-9.", "error") - - def test_admin_resets_user_hotp_format_odd(self): - self._login_admin() - old_hotp = self.user.hotp.secret - - self.client.post(url_for('admin.reset_two_factor_hotp'), - data=dict(uid=self.user.id, otp_secret='Z')) - new_hotp = self.user.hotp.secret - - self.assertEqual(old_hotp, new_hotp) - self.assertMessageFlashed( - "Invalid secret format: " - "odd-length secret. Did you mistype the secret?", "error") - - @patch('db.Journalist.set_hotp_secret') - @patch('journalist.app.logger.error') - def test_admin_resets_user_hotp_error(self, - mocked_error_logger, - mock_set_hotp_secret): - self._login_admin() - old_hotp = self.user.hotp.secret - - error_message = 'SOMETHING WRONG!' - mock_set_hotp_secret.side_effect = TypeError(error_message) - - otp_secret = '1234' - self.client.post(url_for('admin.reset_two_factor_hotp'), - data=dict(uid=self.user.id, otp_secret=otp_secret)) - new_hotp = self.user.hotp.secret - - self.assertEqual(old_hotp, new_hotp) - self.assertMessageFlashed("An unexpected error occurred! 
" - "Please inform your administrator.", "error") - mocked_error_logger.assert_called_once_with( - "set_hotp_secret '{}' (id {}) failed: {}".format( - otp_secret, self.user.id, error_message)) - - def test_user_resets_hotp(self): - self._login_user() - old_hotp = self.user.hotp - - resp = self.client.post(url_for('account.reset_two_factor_hotp'), - data=dict(otp_secret=123456)) - new_hotp = self.user.hotp - - # check that hotp is different - self.assertNotEqual(old_hotp.secret, new_hotp.secret) - # should redirect to verification page - self.assertRedirects(resp, url_for('account.new_two_factor')) - - def test_admin_resets_user_totp(self): - self._login_admin() - old_totp = self.user.totp - - resp = self.client.post( - url_for('admin.reset_two_factor_totp'), - data=dict(uid=self.user.id)) - new_totp = self.user.totp - - self.assertNotEqual(old_totp.secret, new_totp.secret) - - self.assertRedirects( - resp, - url_for('admin.new_user_two_factor', uid=self.user.id)) - - def test_user_resets_totp(self): - self._login_user() - old_totp = self.user.totp - - resp = self.client.post(url_for('account.reset_two_factor_totp')) - new_totp = self.user.totp - - # check that totp is different - self.assertNotEqual(old_totp.secret, new_totp.secret) - - # should redirect to verification page - self.assertRedirects(resp, url_for('account.new_two_factor')) - - def test_admin_resets_hotp_with_missing_otp_secret_key(self): - self._login_admin() - resp = self.client.post(url_for('admin.reset_two_factor_hotp'), - data=dict(uid=self.user.id)) - - self.assertIn('Change Secret', resp.data) - - def test_admin_new_user_2fa_redirect(self): - self._login_admin() - resp = self.client.post( - url_for('admin.new_user_two_factor', uid=self.user.id), - data=dict(token='mocked')) - self.assertRedirects(resp, url_for('admin.index')) - - def test_http_get_on_admin_new_user_two_factor_page(self): - self._login_admin() - resp = self.client.get(url_for('admin.new_user_two_factor', - uid=self.user.id)) - # any GET req should take a user to the admin.new_user_two_factor page - self.assertIn('FreeOTP', resp.data) - - def test_http_get_on_admin_add_user_page(self): - self._login_admin() - resp = self.client.get(url_for('admin.add_user')) + # Re-fetch journalist to get fresh DB instance + user = Journalist.query.get(test_journo['id']) + new_secret = user.otp_secret + + assert new_secret != old_secret + + +def test_admin_resets_hotp_with_missing_otp_secret_key(journalist_app, + test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.post(url_for('admin.reset_two_factor_hotp'), + data=dict(uid=test_admin['id'])) + + assert 'Change Secret' in resp.data.decode('utf-8') + + +def test_admin_new_user_2fa_redirect(journalist_app, test_admin, test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + with InstrumentedApp(journalist_app) as ins: + resp = app.post( + url_for('admin.new_user_two_factor', uid=test_journo['id']), + data=dict(token=TOTP(test_journo['otp_secret']).now())) + ins.assert_redirects(resp, url_for('admin.index')) + + +def test_http_get_on_admin_new_user_two_factor_page( + journalist_app, + test_admin, + test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.get(url_for('admin.new_user_two_factor', 
uid=test_journo['id'])) + # any GET req should take a user to the admin.new_user_two_factor page + assert 'FreeOTP' in resp.data.decode('utf-8') + + +def test_http_get_on_admin_add_user_page(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + resp = app.get(url_for('admin.add_user')) # any GET req should take a user to the admin_add_user page - self.assertIn('ADD USER', resp.data) - - def test_admin_add_user(self): - self._login_admin() - max_journalist_pk = max([user.id for user in Journalist.query.all()]) - - resp = self.client.post(url_for('admin.add_user'), - data=dict(username='dellsberg', - password=VALID_PASSWORD, - is_admin=None)) - - self.assertRedirects(resp, url_for('admin.new_user_two_factor', - uid=max_journalist_pk+1)) - - def test_admin_add_user_without_username(self): - self._login_admin() - resp = self.client.post(url_for('admin.add_user'), - data=dict(username='', - password=VALID_PASSWORD, - is_admin=None)) - self.assertIn('This field is required.', resp.data) - - def test_admin_add_user_too_short_username(self): - self._login_admin() - username = 'a' * (Journalist.MIN_USERNAME_LEN - 1) - resp = self.client.post(url_for('admin.add_user'), - data=dict(username=username, - password='pentagonpapers', - password_again='pentagonpapers', - is_admin=None)) - self.assertIn('Field must be at least {} characters long'.format( - Journalist.MIN_USERNAME_LEN), - resp.data) - - def test_admin_add_user_yubikey_odd_length(self): - self._login_admin() - resp = self.client.post(url_for('admin.add_user'), - data=dict(username='dellsberg', - password=VALID_PASSWORD, - password_again=VALID_PASSWORD, - is_admin=None, - is_hotp=True, - otp_secret='123')) - self.assertIn('Field must be 40 characters long', resp.data) - - def test_admin_add_user_yubikey_valid_length(self): - self._login_admin() - - otp = '1234567890123456789012345678901234567890' - resp = self.client.post(url_for('admin.add_user'), - data=dict(username='dellsberg', - password=VALID_PASSWORD, - password_again=VALID_PASSWORD, - is_admin=None, - is_hotp=True, - otp_secret=otp), - follow_redirects=True) - - # Should redirect to the token verification page - self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data) - - def test_admin_add_user_yubikey_correct_length_with_whitespace(self): - self._login_admin() - - otp = '12 34 56 78 90 12 34 56 78 90 12 34 56 78 90 12 34 56 78 90' - resp = self.client.post(url_for('admin.add_user'), - data=dict(username='dellsberg', - password=VALID_PASSWORD, - password_again=VALID_PASSWORD, - is_admin=None, - is_hotp=True, - otp_secret=otp), - follow_redirects=True) - - # Should redirect to the token verification page - self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data) - - def test_admin_sets_user_to_admin(self): - self._login_admin() - new_user = 'admin-set-user-to-admin-test' - resp = self.client.post(url_for('admin.add_user'), - data=dict(username=new_user, - password=VALID_PASSWORD, - is_admin=None)) + assert 'ADD USER' in resp.data.decode('utf-8') + + +def test_admin_add_user(journalist_app, test_admin): + username = 'dellsberg' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('admin.add_user'), + data=dict(username=username, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + + new_user 
= Journalist.query.filter_by(username=username).one() + ins.assert_redirects(resp, url_for('admin.new_user_two_factor', + uid=new_user.id)) + + +def test_admin_add_user_without_username(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username='', + password=VALID_PASSWORD, + is_admin=None)) + + assert 'This field is required.' in resp.data.decode('utf-8') + + +def test_admin_add_user_too_short_username(journalist_app, test_admin): + username = 'a' * (Journalist.MIN_USERNAME_LEN - 1) + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=username, + password='pentagonpapers', + password_again='pentagonpapers', + is_admin=None)) + msg = 'Field must be at least {} characters long' + assert (msg.format(Journalist.MIN_USERNAME_LEN) in resp.data.decode('utf-8')) + + +def test_admin_add_user_yubikey_odd_length(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username='dellsberg', + first_name='', + last_name='', + password=VALID_PASSWORD, + password_again=VALID_PASSWORD, + is_admin=None, + is_hotp=True, + otp_secret='123')) + assert 'HOTP secrets are 40 characters' in resp.data.decode('utf-8') + + +def test_admin_add_user_yubikey_valid_length(journalist_app, test_admin): + otp = '1234567890123456789012345678901234567890' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username='dellsberg', + first_name='', + last_name='', + password=VALID_PASSWORD, + password_again=VALID_PASSWORD, + is_admin=None, + is_hotp=True, + otp_secret=otp), + follow_redirects=True) + + # Should redirect to the token verification page + assert 'Enable YubiKey (OATH-HOTP)' in resp.data.decode('utf-8') + + +def test_admin_add_user_yubikey_correct_length_with_whitespace( + journalist_app, + test_admin): + otp = '12 34 56 78 90 12 34 56 78 90 12 34 56 78 90 12 34 56 78 90' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username='dellsberg', + first_name='', + last_name='', + password=VALID_PASSWORD, + password_again=VALID_PASSWORD, + is_admin=None, + is_hotp=True, + otp_secret=otp), + follow_redirects=True) + + # Should redirect to the token verification page + assert 'Enable YubiKey (OATH-HOTP)' in resp.data.decode('utf-8') + + +def test_admin_sets_user_to_admin(journalist_app, test_admin): + new_user = 'admin-set-user-to-admin-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) assert resp.status_code in (200, 302) - journo = Journalist.query.filter(Journalist.username == new_user).one() - assert not journo.is_admin - resp = self.client.post(url_for('admin.edit_user', 
user_id=journo.id), - data=dict(is_admin=True)) - assert resp.status_code in (200, 302), resp.data.decode('utf-8') + journo = Journalist.query.filter_by(username=new_user).one() + # precondition check + assert journo.is_admin is False - # there are better ways to do this, but flake8 complains - journo = Journalist.query.filter(Journalist.username == new_user).one() + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(first_name='', last_name='', is_admin=True)) + assert resp.status_code in (200, 302) + + journo = Journalist.query.filter_by(username=new_user).one() assert journo.is_admin is True - def test_admin_renames_user(self): - self._login_admin() - new_user = 'admin-renames-user-test' - resp = self.client.post(url_for('admin.add_user'), - data=dict(username=new_user, - password=VALID_PASSWORD, - is_admin=None)) + +def test_admin_renames_user(journalist_app, test_admin): + new_user = 'admin-renames-user-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) assert resp.status_code in (200, 302) journo = Journalist.query.filter(Journalist.username == new_user).one() new_user = new_user + 'a' - resp = self.client.post(url_for('admin.edit_user', user_id=journo.id), - data=dict(username=new_user)) - assert resp.status_code in (200, 302), resp.data.decode('utf-8') - - # the following will throw an exception if new_user is not found - # therefore asserting it has been created - Journalist.query.filter(Journalist.username == new_user).one() - - @patch('journalist_app.admin.current_app.logger.error') - @patch('journalist_app.admin.Journalist', - side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None)) - def test_admin_add_user_integrity_error(self, - mock_journalist, - mocked_error_logger): - self._login_admin() - - self.client.post(url_for('admin.add_user'), - data=dict(username='username', - password=VALID_PASSWORD, - is_admin=None)) - - log_event = mocked_error_logger.call_args[0][0] - self.assertIn( - "Adding user 'username' failed: (__builtin__.NoneType) " - "None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']", - log_event) - self.assertMessageFlashed( - "An error occurred saving this user to the database." 
- " Please inform your administrator.", - "error") - - def test_logo_upload_with_valid_image_succeeds(self): - # Save original logo to restore after test run - logo_image_location = os.path.join(config.SECUREDROP_ROOT, - "static/i/logo.png") - with open(logo_image_location) as logo_file: - original_image = logo_file.read() - - try: - self._login_admin() - - form = journalist_app.forms.LogoForm( - logo=(StringIO('imagedata'), 'test.png') + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(username=new_user, + first_name='', + last_name='')) + assert resp.status_code in (200, 302), resp.data.decode('utf-8') + + # the following will throw an exception if new_user is not found + # therefore asserting it has been created + Journalist.query.filter(Journalist.username == new_user).one() + + +def test_admin_adds_first_name_last_name_to_user(journalist_app, test_admin): + new_user = 'admin-first-name-last-name-user-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + assert resp.status_code in (200, 302) + journo = Journalist.query.filter(Journalist.username == new_user).one() + + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(username=new_user, + first_name='test name', + last_name='test name')) + assert resp.status_code in (200, 302) + + # the following will throw an exception if new_user is not found + # therefore asserting it has been created + Journalist.query.filter(Journalist.username == new_user).one() + + +def test_admin_adds_invalid_first_last_name_to_user(journalist_app, test_admin): + new_user = 'admin-invalid-first-name-last-name-user-test' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=new_user, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + assert resp.status_code in (200, 302) + journo = Journalist.query.filter(Journalist.username == new_user).one() + + overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1) + resp = app.post(url_for('admin.edit_user', user_id=journo.id), + data=dict(username=overly_long_name, + first_name=overly_long_name, + last_name='test name'), + follow_redirects=True) + assert resp.status_code in (200, 302) + text = resp.data.decode('utf-8') + assert 'Name not updated' in text + + +def test_admin_add_user_integrity_error(journalist_app, test_admin, mocker): + mocked_error_logger = mocker.patch('journalist_app.admin.current_app.logger.error') + mocker.patch('journalist_app.admin.Journalist', + side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None)) + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.add_user'), + data=dict(username='username', + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + ins.assert_message_flashed( + "An error occurred saving this user to the database." 
+ " Please inform your admin.", + "error") + + log_event = mocked_error_logger.call_args[0][0] + assert ("Adding user 'username' failed: (builtins.NoneType) " + "None\n[SQL: STATEMENT]\n[parameters: 'PARAMETERS']") in log_event + + +def test_prevent_document_uploads(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + form = journalist_app_module.forms.SubmissionPreferencesForm( + prevent_document_uploads=True) + app.post(url_for('admin.update_submission_preferences'), + data=form.data, + follow_redirects=True) + assert InstanceConfig.get_current().allow_document_uploads is False + + +def test_no_prevent_document_uploads(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + app.post(url_for('admin.update_submission_preferences'), + follow_redirects=True) + assert InstanceConfig.get_current().allow_document_uploads is True + + +def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin): + # Save original logo to restore after test run + logo_image_location = os.path.join(config.SECUREDROP_ROOT, + "static/i/logo.png") + with io.open(logo_image_location, 'rb') as logo_file: + original_image = logo_file.read() + + try: + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + # Create 1px * 1px 'white' PNG file from its base64 string + form = journalist_app_module.forms.LogoForm( + logo=(BytesIO(base64.decodestring + (b"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQ" + b"VR42mP8/x8AAwMCAO+ip1sAAAAASUVORK5CYII=")), 'test.png') ) - self.client.post(url_for('admin.manage_config'), - data=form.data, - follow_redirects=True) + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.manage_config'), + data=form.data, + follow_redirects=True) - self.assertMessageFlashed("Image updated.", "notification") - finally: - # Restore original image to logo location for subsequent tests - with open(logo_image_location, 'w') as logo_file: - logo_file.write(original_image) + ins.assert_message_flashed("Image updated.", "logo-success") + finally: + # Restore original image to logo location for subsequent tests + with io.open(logo_image_location, 'wb') as logo_file: + logo_file.write(original_image) - def test_logo_upload_with_invalid_filetype_fails(self): - self._login_admin() - form = journalist_app.forms.LogoForm( - logo=(StringIO('filedata'), 'bad.exe') +def test_logo_upload_with_invalid_filetype_fails(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + + form = journalist_app_module.forms.LogoForm( + logo=(BytesIO(b'filedata'), 'bad.exe') ) - resp = self.client.post(url_for('admin.manage_config'), - data=form.data, - follow_redirects=True) + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('admin.manage_config'), + data=form.data, + follow_redirects=True) + ins.assert_message_flashed("You can only upload PNG image files.", + "logo-error") + text = resp.data.decode('utf-8') + assert "You can only upload PNG image files." 
in text - self.assertIn('Upload images only.', resp.data) - def test_logo_upload_with_empty_input_field_fails(self): - self._login_admin() +def test_creation_of_ossec_test_log_event(journalist_app, test_admin, mocker): + mocked_error_logger = mocker.patch('journalist.app.logger.error') + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) + app.get(url_for('admin.ossec_test')) - form = journalist_app.forms.LogoForm( - logo=(StringIO(''), '') - ) - resp = self.client.post(url_for('admin.manage_config'), - data=form.data, - follow_redirects=True) + mocked_error_logger.assert_called_once_with( + "This is a test OSSEC alert" + ) - self.assertIn('File required.', resp.data) - @patch('journalist.app.logger.error') - def test_creation_of_ossec_test_log_event(self, mocked_error_logger): - self._login_admin() - self.client.get(url_for('admin.ossec_test')) +def test_logo_upload_with_empty_input_field_fails(journalist_app, test_admin): + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], + test_admin['otp_secret']) - mocked_error_logger.assert_called_once_with( - "This is a test OSSEC alert" + form = journalist_app_module.forms.LogoForm( + logo=(BytesIO(b''), '') ) - def test_admin_page_restriction_http_gets(self): - admin_urls = [url_for('admin.index'), url_for('admin.add_user'), - url_for('admin.edit_user', user_id=self.user.id)] + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('admin.manage_config'), + data=form.data, + follow_redirects=True) + + ins.assert_message_flashed("File required.", "logo-error") + assert 'File required.' in resp.data.decode('utf-8') - self._login_user() + +def test_admin_page_restriction_http_gets(journalist_app, test_journo): + admin_urls = [url_for('admin.index'), url_for('admin.add_user'), + url_for('admin.edit_user', user_id=test_journo['id'])] + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) for admin_url in admin_urls: - resp = self.client.get(admin_url) - self.assertStatus(resp, 302) - - def test_admin_page_restriction_http_posts(self): - admin_urls = [url_for('admin.reset_two_factor_totp'), - url_for('admin.reset_two_factor_hotp'), - url_for('admin.add_user', user_id=self.user.id), - url_for('admin.new_user_two_factor'), - url_for('admin.reset_two_factor_totp'), - url_for('admin.reset_two_factor_hotp'), - url_for('admin.edit_user', user_id=self.user.id), - url_for('admin.delete_user', user_id=self.user.id)] - self._login_user() + resp = app.get(admin_url) + assert resp.status_code == 302 + + +def test_admin_page_restriction_http_posts(journalist_app, test_journo): + admin_urls = [url_for('admin.reset_two_factor_totp'), + url_for('admin.reset_two_factor_hotp'), + url_for('admin.add_user', user_id=test_journo['id']), + url_for('admin.new_user_two_factor'), + url_for('admin.reset_two_factor_totp'), + url_for('admin.reset_two_factor_hotp'), + url_for('admin.edit_user', user_id=test_journo['id']), + url_for('admin.delete_user', user_id=test_journo['id'])] + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) for admin_url in admin_urls: - resp = self.client.post(admin_url) - self.assertStatus(resp, 302) + resp = app.post(admin_url) + assert resp.status_code == 302 - def test_user_authorization_for_gets(self): - urls = 
[url_for('main.index'), url_for('col.col', filesystem_id='1'), - url_for('col.download_single_submission', - filesystem_id='1', fn='1'), - url_for('account.edit')] +def test_user_authorization_for_gets(journalist_app): + urls = [url_for('main.index'), url_for('col.col', filesystem_id='1'), + url_for('col.download_single_file', + filesystem_id='1', fn='1'), + url_for('account.edit')] + + with journalist_app.test_client() as app: for url in urls: - resp = self.client.get(url) - self.assertStatus(resp, 302) - - def test_user_authorization_for_posts(self): - urls = [url_for('col.add_star', filesystem_id='1'), - url_for('col.remove_star', filesystem_id='1'), - url_for('col.process'), - url_for('col.delete_single', filesystem_id='1'), - url_for('main.reply'), - url_for('main.regenerate_code'), - url_for('main.bulk'), - url_for('account.new_two_factor'), - url_for('account.reset_two_factor_totp'), - url_for('account.reset_two_factor_hotp')] + resp = app.get(url) + assert resp.status_code == 302 + + +def test_user_authorization_for_posts(journalist_app): + urls = [url_for('col.add_star', filesystem_id='1'), + url_for('col.remove_star', filesystem_id='1'), + url_for('col.process'), + url_for('col.delete_single', filesystem_id='1'), + url_for('main.reply'), + url_for('main.bulk'), + url_for('account.new_two_factor'), + url_for('account.reset_two_factor_totp'), + url_for('account.reset_two_factor_hotp'), + url_for('account.change_name')] + with journalist_app.test_client() as app: for url in urls: - res = self.client.post(url) - self.assertStatus(res, 302) + resp = app.post(url) + assert resp.status_code == 302 - def test_incorrect_current_password_change(self): - self._login_user() - resp = self.client.post(url_for('account.new_password'), - data=dict(password=VALID_PASSWORD, - token='mocked', - current_password='badpw'), - follow_redirects=True) - text = resp.data.decode('utf-8') - self.assertIn('Incorrect password or two-factor code', text) +def test_incorrect_current_password_change(journalist_app, test_journo): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + resp = app.post(url_for('account.new_password'), + data=dict(password=VALID_PASSWORD, + token='mocked', + current_password='badpw'), + follow_redirects=True) - def test_too_long_user_password_change(self): - self._login_user() + text = resp.data.decode('utf-8') + assert 'Incorrect password or two-factor code' in text - overly_long_password = VALID_PASSWORD + \ - 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1) - self.client.post(url_for('account.new_password'), - data=dict(password=overly_long_password, - token='mocked', - current_password=self.user_pw), - follow_redirects=True) +# need a journalist app for the app context +def test_passphrase_migration_on_verification(journalist_app): + salt = b64decode('+mGOQmD5Nnb+mH9gwBoxKRhKZmmJ6BzpmD5YArPHZsY=') + journalist = Journalist('test', VALID_PASSWORD) - self.assertMessageFlashed('You submitted a bad password! 
Password not '
-                                  'changed.', 'error')
+    # manually set the params
+    hash = journalist._scrypt_hash(VALID_PASSWORD, salt)
+    journalist.passphrase_hash = None
+    journalist.pw_salt = salt
+    journalist.pw_hash = hash
-    def test_valid_user_password_change(self):
-        self._login_user()
-        resp = self.client.post(
-            url_for('account.new_password'),
-            data=dict(password=VALID_PASSWORD_2,
-                      token='mocked',
-                      current_password=self.user_pw),
-            follow_redirects=True)
+    assert journalist.valid_password(VALID_PASSWORD)
-        assert 'Password updated.' in \
-            resp.data.decode('utf-8')
+    # check that the migration happened
+    assert journalist.passphrase_hash is not None
+    assert journalist.pw_salt is None
+    assert journalist.pw_hash is None
-    def test_regenerate_totp(self):
-        self._login_user()
-        old_totp = self.user.totp
+    # check that a verification post-migration works
+    assert journalist.valid_password(VALID_PASSWORD)
-        res = self.client.post(url_for('account.reset_two_factor_totp'))
-        new_totp = self.user.totp
-        # check that totp is different
-        self.assertNotEqual(old_totp.secret, new_totp.secret)
+# need a journalist app for the app context
+def test_passphrase_migration_on_reset(journalist_app):
+    salt = b64decode('+mGOQmD5Nnb+mH9gwBoxKRhKZmmJ6BzpmD5YArPHZsY=')
+    journalist = Journalist('test', VALID_PASSWORD)
-        # should redirect to verification page
-        self.assertRedirects(res, url_for('account.new_two_factor'))
+    # manually set the params
+    hash = journalist._scrypt_hash(VALID_PASSWORD, salt)
+    journalist.passphrase_hash = None
+    journalist.pw_salt = salt
+    journalist.pw_hash = hash
-    def test_edit_hotp(self):
-        self._login_user()
-        old_hotp = self.user.hotp
+    journalist.set_password(VALID_PASSWORD)
-        res = self.client.post(
-            url_for('account.reset_two_factor_hotp'),
-            data=dict(otp_secret=123456)
-        )
-        new_hotp = self.user.hotp
-
-        # check that hotp is different
-        self.assertNotEqual(old_hotp.secret, new_hotp.secret)
-
-        # should redirect to verification page
-        self.assertRedirects(res, url_for('account.new_two_factor'))
-
-    def test_delete_source_deletes_submissions(self):
-        """Verify that when a source is deleted, the submissions that
-        correspond to them are also deleted."""
-
-        self._delete_collection_setup()
-        journalist_app.utils.delete_collection(self.source.filesystem_id)
-
-        # Source should be gone
-        results = db_session.query(Source).filter(
-            Source.id == self.source.id).all()
-        self.assertEqual(results, [])
-
-    def _delete_collection_setup(self):
-        self.source, _ = utils.db_helper.init_source()
-        utils.db_helper.submit(self.source, 2)
-        utils.db_helper.reply(self.user, self.source, 2)
-
-    def test_delete_collection_updates_db(self):
-        """Verify that when a source is deleted, their Source identity
-        record, as well as Reply & Submission records associated with
-        that record are purged from the database."""
-        self._delete_collection_setup()
-        journalist_app.utils.delete_collection(self.source.filesystem_id)
-        results = Source.query.filter(Source.id == self.source.id).all()
-        self.assertEqual(results, [])
-        results = db_session.query(
-            Submission.source_id == self.source.id).all()
-        self.assertEqual(results, [])
-        results = db_session.query(Reply.source_id == self.source.id).all()
-        self.assertEqual(results, [])
-
-    def test_delete_source_deletes_source_key(self):
-        """Verify that when a source is deleted, the PGP key that corresponds
-        to them is also deleted."""
-        self._delete_collection_setup()
+    # check that the migration happened
+    assert journalist.passphrase_hash is not None
+    assert journalist.pw_salt is None
+    assert journalist.pw_hash is None
-        # Source key exists
-        source_key = crypto_util.getkey(self.source.filesystem_id)
-        self.assertNotEqual(source_key, None)
+    # check that a verification post-migration works
+    assert journalist.valid_password(VALID_PASSWORD)
-        journalist_app.utils.delete_collection(self.source.filesystem_id)
-        # Source key no longer exists
-        source_key = crypto_util.getkey(self.source.filesystem_id)
-        self.assertEqual(source_key, None)
+def test_journalist_reply_view(journalist_app, test_source, test_journo):
+    source, _ = utils.db_helper.init_source()
+    journalist, _ = utils.db_helper.init_journalist()
+    submissions = utils.db_helper.submit(source, 1)
+    replies = utils.db_helper.reply(journalist, source, 1)
+
+    subm_url = url_for('col.download_single_file',
+                       filesystem_id=submissions[0].source.filesystem_id,
+                       fn=submissions[0].filename)
+    reply_url = url_for('col.download_single_file',
+                        filesystem_id=replies[0].source.filesystem_id,
+                        fn=replies[0].filename)
+
+    with journalist_app.test_client() as app:
+        resp = app.get(subm_url)
+        assert resp.status_code == 302
+        resp = app.get(reply_url)
+        assert resp.status_code == 302
+
+
+def test_too_long_user_password_change(journalist_app, test_journo):
+    overly_long_password = VALID_PASSWORD + \
+        'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
+
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+
+        with InstrumentedApp(journalist_app) as ins:
+            app.post(url_for('account.new_password'),
+                     data=dict(password=overly_long_password,
+                               token=TOTP(test_journo['otp_secret']).now(),
+                               current_password=test_journo['password']),
+                     follow_redirects=True)
+
+            ins.assert_message_flashed(
+                'You submitted a bad password! Password not changed.', 'error')
+
+
+def test_valid_user_password_change(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+
+        resp = app.post(url_for('account.new_password'),
+                        data=dict(password=VALID_PASSWORD_2,
+                                  token=TOTP(test_journo['otp_secret']).now(),
+                                  current_password=test_journo['password']),
+                        follow_redirects=True)
+
+        assert 'Password updated.' in resp.data.decode('utf-8')
+
+
+def test_valid_user_first_last_name_change(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
-    def test_delete_source_deletes_docs_on_disk(self):
-        """Verify that when a source is deleted, the encrypted documents that
-        exist on disk is also deleted."""
-        self._delete_collection_setup()
+        resp = app.post(url_for('account.change_name'),
+                        data=dict(first_name='test',
+                                  last_name='test'),
+                        follow_redirects=True)
-        # Encrypted documents exists
-        dir_source_docs = os.path.join(config.STORE_DIR,
-                                       self.source.filesystem_id)
-        self.assertTrue(os.path.exists(dir_source_docs))
+        assert 'Name updated.' in resp.data.decode('utf-8')
-        job = journalist_app.utils.delete_collection(self.source.filesystem_id)
-        # Wait up to 5s to wait for Redis worker `srm` operation to complete
-        utils.async.wait_for_redis_worker(job)
+def test_valid_user_invalid_first_last_name_change(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        overly_long_name = 'a' * (Journalist.MAX_NAME_LEN + 1)
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
-        # Encrypted documents no longer exist
-        self.assertFalse(os.path.exists(dir_source_docs))
+        resp = app.post(url_for('account.change_name'),
+                        data=dict(first_name=overly_long_name,
+                                  last_name=overly_long_name),
+                        follow_redirects=True)
-    def test_download_selected_submissions_from_source(self):
-        source, _ = utils.db_helper.init_source()
-        submissions = utils.db_helper.submit(source, 4)
-        selected_submissions = random.sample(submissions, 2)
-        selected_fnames = [submission.filename
-                           for submission in selected_submissions]
-        selected_fnames.sort()
+        assert 'Name not updated' in resp.data.decode('utf-8')
-        self._login_user()
-        resp = self.client.post(
+
+def test_regenerate_totp(journalist_app, test_journo):
+    old_secret = test_journo['otp_secret']
+
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+
+        with InstrumentedApp(journalist_app) as ins:
+            resp = app.post(url_for('account.reset_two_factor_totp'))
+
+            new_secret = Journalist.query.get(test_journo['id']).otp_secret
+
+            # check that totp is different
+            assert new_secret != old_secret
+
+            # should redirect to verification page
+            ins.assert_redirects(resp, url_for('account.new_two_factor'))
+
+
+def test_edit_hotp(journalist_app, test_journo):
+    old_secret = test_journo['otp_secret']
+
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+
+        with InstrumentedApp(journalist_app) as ins:
+            resp = app.post(url_for('account.reset_two_factor_hotp'),
+                            data=dict(otp_secret=123456))
+
+            new_secret = Journalist.query.get(test_journo['id']).otp_secret
+
+            # check that hotp is different
+            assert new_secret != old_secret
+
+            # should redirect to verification page
+            ins.assert_redirects(resp, url_for('account.new_two_factor'))
+
+
+def test_delete_source_deletes_submissions(journalist_app,
+                                           test_journo,
+                                           test_source):
+    """Verify that when a source is deleted, the submissions that
+    correspond to them are also deleted."""
+
+    with journalist_app.app_context():
+        source = Source.query.get(test_source['id'])
+        journo = Journalist.query.get(test_journo['id'])
+
+        utils.db_helper.submit(source, 2)
+        utils.db_helper.reply(journo, source, 2)
+
+        journalist_app_module.utils.delete_collection(
+            test_source['filesystem_id'])
+
+        res = Source.query.filter_by(id=test_source['id']).one_or_none()
+        assert res is None
+
+
+def test_delete_collection_updates_db(journalist_app,
+                                      test_journo,
+                                      test_source):
+    """Verify that when a source is deleted, their Source identity
+    record, as well as Reply & Submission records associated with
+    that record are purged from the database."""
+
+    with journalist_app.app_context():
+        source = Source.query.get(test_source['id'])
+        journo = Journalist.query.get(test_journo['id'])
+
+        utils.db_helper.submit(source, 2)
+        utils.db_helper.reply(journo, source, 2)
+
+        journalist_app_module.utils.delete_collection(
+            test_source['filesystem_id'])
+        res = Source.query.filter_by(id=test_source['id']).one_or_none()
+        assert res is None
+
+        res = Submission.query.filter_by(source_id=test_source['id']) \
+            .one_or_none()
+        assert res is None
+
+        res = Reply.query.filter_by(source_id=test_source['id']) \
+            .one_or_none()
+        assert res is None
+
+
+def test_delete_source_deletes_source_key(journalist_app,
+                                          test_source,
+                                          test_journo):
+    """Verify that when a source is deleted, the PGP key that corresponds
+    to them is also deleted."""
+
+    with journalist_app.app_context():
+        source = Source.query.get(test_source['id'])
+        journo = Journalist.query.get(test_journo['id'])
+
+        utils.db_helper.submit(source, 2)
+        utils.db_helper.reply(journo, source, 2)
+
+        # Source key exists
+        source_key = current_app.crypto_util.getkey(
+            test_source['filesystem_id'])
+        assert source_key is not None
+
+        journalist_app_module.utils.delete_collection(
+            test_source['filesystem_id'])
+
+        # Source key no longer exists
+        source_key = current_app.crypto_util.getkey(
+            test_source['filesystem_id'])
+        assert source_key is None
+
+
+def test_delete_source_deletes_docs_on_disk(journalist_app,
+                                            test_source,
+                                            test_journo,
+                                            config):
+    """Verify that when a source is deleted, the encrypted documents that
+    exist on disk are also deleted."""
+
+    with journalist_app.app_context():
+        source = Source.query.get(test_source['id'])
+        journo = Journalist.query.get(test_journo['id'])
+
+        utils.db_helper.submit(source, 2)
+        utils.db_helper.reply(journo, source, 2)
+
+        dir_source_docs = os.path.join(config.STORE_DIR, test_source['filesystem_id'])
+        assert os.path.exists(dir_source_docs)
+
+        journalist_app_module.utils.delete_collection(test_source['filesystem_id'])
+
+        def assertion():
+            assert not os.path.exists(dir_source_docs)
+
+        utils.asynchronous.wait_for_assertion(assertion)
+
+
+def test_login_with_invalid_password_doesnt_call_argon2(mocker, test_journo):
+    mock_argon2 = mocker.patch('models.argon2.verify')
+    invalid_pw = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
+
+    with pytest.raises(InvalidPasswordLength):
+        Journalist.login(test_journo['username'],
+                         invalid_pw,
+                         TOTP(test_journo['otp_secret']).now())
+    assert not mock_argon2.called
+
+
+def test_valid_login_calls_argon2(mocker, test_journo):
+    mock_argon2 = mocker.patch('models.argon2.verify')
+    Journalist.login(test_journo['username'],
+                     test_journo['password'],
+                     TOTP(test_journo['otp_secret']).now())
+    assert mock_argon2.called
+
+
+def test_render_locales(config, journalist_app, test_journo, test_source):
+    """the locales.html template must collect both request.args (l=XX) and
+    request.view_args (/<filesystem_id>) to build the URL to
+    change the locale
+    """
+
+    # We use the `journalist_app` fixture to generate all our tables, but we
+    # don't use it during the test because we need to inject the i18n settings
+    # (which are only triggered during `create_app`)
+    config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
+    app = journalist_app_module.create_app(config)
+    app.config['SERVER_NAME'] = 'localhost.localdomain'  # needed for url_for
+    url = url_for('col.col', filesystem_id=test_source['filesystem_id'])
+
+    # we need the relative URL, not the full url including proto / localhost
+    url_end = url.replace('http://', '')
+    url_end = url_end[url_end.index('/') + 1:]
+
+    with app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+        resp = app.get(url + '?l=fr_FR')
+
+        # check that links to i18n URLs are/aren't present
+        text = resp.data.decode('utf-8')
+        assert '?l=fr_FR' not in text, text
+        assert url_end + '?l=en_US' in text, text
+
+
+def test_download_selected_submissions_from_source(journalist_app,
+                                                   test_journo,
+                                                   test_source):
+    source = Source.query.get(test_source['id'])
+    submissions = utils.db_helper.submit(source, 4)
+    selected_submissions = random.sample(submissions, 2)
+    selected_fnames = [submission.filename
+                       for submission in selected_submissions]
+    selected_fnames.sort()
+
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+        resp = app.post(
             '/bulk',
             data=dict(action='download',
-                      filesystem_id=source.filesystem_id,
+                      filesystem_id=test_source['filesystem_id'],
                       doc_names_selected=selected_fnames))
-        # The download request was succesful, and the app returned a zipfile
-        self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.content_type, 'application/zip')
-        self.assertTrue(zipfile.is_zipfile(StringIO(resp.data)))
-
-        # The submissions selected are in the zipfile
-        for filename in selected_fnames:
-            self.assertTrue(
-                # Check that the expected filename is in the zip file
-                zipfile.ZipFile(StringIO(resp.data)).getinfo(
-                    os.path.join(
-                        source.journalist_filename,
-                        "%s_%s" % (filename.split('-')[0],
-                                   source.last_updated.date()),
-                        filename
-                    ))
-            )
-
-        # The submissions not selected are absent from the zipfile
-        not_selected_submissions = set(submissions).difference(
-            selected_submissions)
-        not_selected_fnames = [submission.filename
-                               for submission in not_selected_submissions]
-
-        for filename in not_selected_fnames:
-            with self.assertRaises(KeyError):
-                zipfile.ZipFile(StringIO(resp.data)).getinfo(
-                    os.path.join(
-                        source.journalist_filename,
-                        source.journalist_designation,
-                        "%s_%s" % (filename.split('-')[0],
-                                   source.last_updated.date()),
-                        filename
-                    ))
-
-    def _bulk_download_setup(self):
-        """Create a couple sources, make some submissions on their behalf,
-        mark some of them as downloaded, and then perform *action* on all
-        sources."""
-        self.source0, _ = utils.db_helper.init_source()
-        self.source1, _ = utils.db_helper.init_source()
-        self.journo0, _ = utils.db_helper.init_journalist()
-        self.submissions0 = utils.db_helper.submit(self.source0, 2)
-        self.submissions1 = utils.db_helper.submit(self.source1, 3)
-        self.downloaded0 = random.sample(self.submissions0, 1)
-        utils.db_helper.mark_downloaded(*self.downloaded0)
-        self.not_downloaded0 = set(self.submissions0).difference(
-            self.downloaded0)
-        self.downloaded1 = random.sample(self.submissions1, 2)
-        utils.db_helper.mark_downloaded(*self.downloaded1)
-        self.not_downloaded1 = set(self.submissions1).difference(
-            self.downloaded1)
-
-    def test_download_unread_all_sources(self):
-        self._bulk_download_setup()
-        self._login_user()
+        # The download request was successful, and the app returned a zipfile
+        assert resp.status_code == 200
+        assert resp.content_type == 'application/zip'
+        assert zipfile.is_zipfile(BytesIO(resp.data))
+
+        # The submissions selected are in the zipfile
+        for filename in selected_fnames:
+            # Check that the expected filename is in the zip file
+            zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo(
+                os.path.join(
+                    source.journalist_filename,
+                    "%s_%s" % (filename.split('-')[0],
+                               source.last_updated.date()),
+                    filename
+                ))
+            assert zipinfo
+
+        # The submissions not selected are absent from the zipfile
+        not_selected_submissions = set(submissions).difference(
+            selected_submissions)
+        not_selected_fnames = [submission.filename
+                               for submission in 
not_selected_submissions] + + for filename in not_selected_fnames: + with pytest.raises(KeyError): + zipfile.ZipFile(BytesIO(resp.data)).getinfo( + os.path.join( + source.journalist_filename, + source.journalist_designation, + "%s_%s" % (filename.split('-')[0], + source.last_updated.date()), + filename + )) + + +def _bulk_download_setup(journo): + """Create a couple sources, make some submissions on their behalf, + mark some of them as downloaded""" + + source0, _ = utils.db_helper.init_source() + source1, _ = utils.db_helper.init_source() + + submissions0 = utils.db_helper.submit(source0, 2) + submissions1 = utils.db_helper.submit(source1, 3) + + downloaded0 = random.sample(submissions0, 1) + utils.db_helper.mark_downloaded(*downloaded0) + not_downloaded0 = set(submissions0).difference(downloaded0) + + downloaded1 = random.sample(submissions1, 2) + utils.db_helper.mark_downloaded(*downloaded1) + not_downloaded1 = set(submissions1).difference(downloaded1) + + return { + 'source0': source0, + 'source1': source1, + 'submissions0': submissions0, + 'submissions1': submissions1, + 'downloaded0': downloaded0, + 'downloaded1': downloaded1, + 'not_downloaded0': not_downloaded0, + 'not_downloaded1': not_downloaded1, + } + + +def test_download_unread_all_sources(journalist_app, test_journo): + bulk = _bulk_download_setup(Journalist.query.get(test_journo['id'])) + + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) # Download all unread messages from all sources - self.resp = self.client.post( + resp = app.post( url_for('col.process'), data=dict(action='download-unread', - cols_selected=[self.source0.filesystem_id, - self.source1.filesystem_id])) - - # The download request was succesful, and the app returned a zipfile - self.assertEqual(self.resp.status_code, 200) - self.assertEqual(self.resp.content_type, 'application/zip') - self.assertTrue(zipfile.is_zipfile(StringIO(self.resp.data))) - - # All the not dowloaded submissions are in the zipfile - for submission in self.not_downloaded0: - self.assertTrue( - zipfile.ZipFile(StringIO(self.resp.data)).getinfo( - os.path.join( - "unread", - self.source0.journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - self.source0.last_updated.date()), - submission.filename - )) - ) - for submission in self.not_downloaded1: - self.assertTrue( - zipfile.ZipFile(StringIO(self.resp.data)).getinfo( - os.path.join( - "unread", - self.source1.journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - self.source1.last_updated.date()), - submission.filename - )) - ) - - # All the downloaded submissions are absent from the zipfile - for submission in self.downloaded0: - with self.assertRaises(KeyError): - zipfile.ZipFile(StringIO(self.resp.data)).getinfo( - os.path.join( - "unread", - self.source0.journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - self.source0.last_updated.date()), - submission.filename - )) - - for submission in self.downloaded1: - with self.assertRaises(KeyError): - zipfile.ZipFile(StringIO(self.resp.data)).getinfo( - os.path.join( - "unread", - self.source1.journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - self.source1.last_updated.date()), - submission.filename - )) - - def test_download_all_selected_sources(self): - self._bulk_download_setup() - self._login_user() - - # Dowload all messages from self.source1 - self.resp = self.client.post( + 
cols_selected=[bulk['source0'].filesystem_id,
+                           bulk['source1'].filesystem_id]))
+
+        # The download request was successful, and the app returned a zipfile
+        assert resp.status_code == 200
+        assert resp.content_type == 'application/zip'
+        assert zipfile.is_zipfile(BytesIO(resp.data))
+
+        # All the not downloaded submissions are in the zipfile
+        for submission in bulk['not_downloaded0']:
+            zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo(os.path.join(
+                "unread",
+                bulk['source0'].journalist_designation,
+                "%s_%s" % (submission.filename.split('-')[0], bulk['source0'].last_updated.date()),
+                submission.filename))
+            assert zipinfo
+
+        for submission in bulk['not_downloaded1']:
+            zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo(
+                os.path.join(
+                    "unread",
+                    bulk['source1'].journalist_designation,
+                    "%s_%s" % (submission.filename.split('-')[0],
+                               bulk['source1'].last_updated.date()),
+                    submission.filename
+                ))
+            assert zipinfo
+
+        # All the downloaded submissions are absent from the zipfile
+        for submission in bulk['downloaded0']:
+            with pytest.raises(KeyError):
+                zipfile.ZipFile(BytesIO(resp.data)).getinfo(
+                    os.path.join(
+                        "unread",
+                        bulk['source0'].journalist_designation,
+                        "%s_%s" % (submission.filename.split('-')[0],
+                                   bulk['source0'].last_updated.date()),
+                        submission.filename
+                    ))
+
+        for submission in bulk['downloaded1']:
+            with pytest.raises(KeyError):
+                zipfile.ZipFile(BytesIO(resp.data)).getinfo(
+                    os.path.join(
+                        "unread",
+                        bulk['source1'].journalist_designation,
+                        "%s_%s" % (submission.filename.split('-')[0],
+                                   bulk['source1'].last_updated.date()),
+                        submission.filename
+                    ))
+
+
+def test_download_all_selected_sources(journalist_app, test_journo):
+    bulk = _bulk_download_setup(Journalist.query.get(test_journo['id']))
+
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+
+        # Download all messages from source1
+        resp = app.post(
             url_for('col.process'),
             data=dict(action='download-all',
-                      cols_selected=[self.source1.filesystem_id]))
+                      cols_selected=[bulk['source1'].filesystem_id]))
-        resp = self.client.post(
+        resp = app.post(
             url_for('col.process'),
             data=dict(action='download-all',
-                      cols_selected=[self.source1.filesystem_id]))
-
-        # The download request was succesful, and the app returned a zipfile
-        self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.content_type, 'application/zip')
-        self.assertTrue(zipfile.is_zipfile(StringIO(resp.data)))
-
-        # All messages from self.source1 are in the zipfile
-        for submission in self.submissions1:
-            self.assertTrue(
-                zipfile.ZipFile(StringIO(resp.data)).getinfo(
-                    os.path.join(
-                        "all",
-                        self.source1.journalist_designation,
-                        "%s_%s" % (submission.filename.split('-')[0],
-                                   self.source1.last_updated.date()),
-                        submission.filename)
-                )
+                      cols_selected=[bulk['source1'].filesystem_id]))
+
+        # The download request was successful, and the app returned a zipfile
+        assert resp.status_code == 200
+        assert resp.content_type == 'application/zip'
+        assert zipfile.is_zipfile(BytesIO(resp.data))
+
+        # All messages from source1 are in the zipfile
+        for submission in bulk['submissions1']:
+            zipinfo = zipfile.ZipFile(BytesIO(resp.data)).getinfo(
+                os.path.join(
+                    "all",
+                    bulk['source1'].journalist_designation,
+                    "%s_%s" % (submission.filename.split('-')[0],
+                               bulk['source1'].last_updated.date()),
+                    submission.filename)
+            )
+            assert zipinfo
+
+        # All messages from source0 are absent from the zipfile
+        for submission in bulk['submissions0']:
+            with 
pytest.raises(KeyError): + zipfile.ZipFile(BytesIO(resp.data)).getinfo( + os.path.join( + "all", + bulk['source0'].journalist_designation, + "%s_%s" % (submission.filename.split('-')[0], + bulk['source0'].last_updated.date()), + submission.filename) ) - # All messages from self.source0 are absent from the zipfile - for submission in self.submissions0: - with self.assertRaises(KeyError): - zipfile.ZipFile(StringIO(resp.data)).getinfo( - os.path.join( - "all", - self.source0.journalist_designation, - "%s_%s" % (submission.filename.split('-')[0], - self.source0.last_updated.date()), - submission.filename) - ) - - def test_single_source_is_successfully_starred(self): - source, _ = utils.db_helper.init_source() - self._login_user() - resp = self.client.post(url_for('col.add_star', - filesystem_id=source.filesystem_id)) - - self.assertRedirects(resp, url_for('main.index')) - - # Assert source is starred - self.assertTrue(source.star.starred) - - def test_single_source_is_successfully_unstarred(self): - source, _ = utils.db_helper.init_source() - self._login_user() +def test_single_source_is_successfully_starred(journalist_app, + test_journo, + test_source): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('col.add_star', + filesystem_id=test_source['filesystem_id'])) + + ins.assert_redirects(resp, url_for('main.index')) + + source = Source.query.get(test_source['id']) + assert source.star.starred + + +def test_single_source_is_successfully_unstarred(journalist_app, + test_journo, + test_source): + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) # First star the source - self.client.post(url_for('col.add_star', - filesystem_id=source.filesystem_id)) + app.post(url_for('col.add_star', + filesystem_id=test_source['filesystem_id'])) + + with InstrumentedApp(journalist_app) as ins: + # Now unstar the source + resp = app.post( + url_for('col.remove_star', + filesystem_id=test_source['filesystem_id'])) + + ins.assert_redirects(resp, url_for('main.index')) + + source = Source.query.get(test_source['id']) + assert not source.star.starred + + +def test_journalist_session_expiration(config, journalist_app, test_journo): + # set the expiration to ensure we trigger an expiration + config.SESSION_EXPIRATION_MINUTES = -1 + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + login_data = { + 'username': test_journo['username'], + 'password': test_journo['password'], + 'token': TOTP(test_journo['otp_secret']).now(), + } + resp = app.post(url_for('main.login'), data=login_data) + ins.assert_redirects(resp, url_for('main.index')) + assert 'uid' in session + + resp = app.get(url_for('account.edit'), follow_redirects=True) + + # check that the session was cleared (apart from 'expires' + # which is always present and 'csrf_token' which leaks no info) + session.pop('expires', None) + session.pop('csrf_token', None) + assert not session, session + assert ('You have been logged out due to inactivity' in + resp.data.decode('utf-8')) + + +def test_csrf_error_page(journalist_app): + journalist_app.config['WTF_CSRF_ENABLED'] = True + with journalist_app.test_client() as app: + with InstrumentedApp(journalist_app) as ins: + resp = app.post(url_for('main.login')) + ins.assert_redirects(resp, url_for('main.login')) + + resp = 
app.post(url_for('main.login'), follow_redirects=True) - # Now unstar the source - resp = self.client.post(url_for('col.remove_star', - filesystem_id=source.filesystem_id)) - - self.assertRedirects(resp, url_for('main.index')) - - # Assert source is not starred - self.assertFalse(source.star.starred) - - def test_journalist_session_expiration(self): - try: - old_expiration = config.SESSION_EXPIRATION_MINUTES - has_session_expiration = True - except AttributeError: - has_session_expiration = False - - try: - with self.client as client: - # set the expiration to ensure we trigger an expiration - config.SESSION_EXPIRATION_MINUTES = -1 - - # do a real login to get a real session - # (none of the mocking `g` hacks) - resp = client.post(url_for('main.login'), - data=dict(username=self.user.username, - password=self.user_pw, - token='mocked')) - self.assertRedirects(resp, url_for('main.index')) - assert 'uid' in session - - resp = client.get(url_for('account.edit'), - follow_redirects=True) + text = resp.data.decode('utf-8') + assert 'You have been logged out due to inactivity' in text - # check that the session was cleared (apart from 'expires' - # which is always present and 'csrf_token' which leaks no info) - session.pop('expires', None) - session.pop('csrf_token', None) - assert not session, session - assert ('You have been logged out due to inactivity' in - resp.data.decode('utf-8')) - finally: - if has_session_expiration: - config.SESSION_EXPIRATION_MINUTES = old_expiration - else: - del config.SESSION_EXPIRATION_MINUTES - - def test_csrf_error_page(self): - old_enabled = self.app.config['WTF_CSRF_ENABLED'] - self.app.config['WTF_CSRF_ENABLED'] = True - - try: - with self.app.test_client() as app: - resp = app.post(url_for('main.login')) - self.assertRedirects(resp, url_for('main.login')) - - resp = app.post(url_for('main.login'), follow_redirects=True) - self.assertIn('You have been logged out due to inactivity', - resp.data) - finally: - self.app.config['WTF_CSRF_ENABLED'] = old_enabled - - def test_col_process_aborts_with_bad_action(self): - """If the action is not a valid choice, a 500 should occur""" - self._login_user() + +def test_col_process_aborts_with_bad_action(journalist_app, test_journo): + """If the action is not a valid choice, a 500 should occur""" + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) form_data = {'cols_selected': 'does not matter', 'action': 'this action does not exist'} - resp = self.client.post(url_for('col.process'), data=form_data) + resp = app.post(url_for('col.process'), data=form_data) + assert resp.status_code == 500 - self.assert500(resp) - def test_col_process_successfully_deletes_multiple_sources(self): - # Create two sources with one submission each - source_1, _ = utils.db_helper.init_source() - utils.db_helper.submit(source_1, 1) - source_2, _ = utils.db_helper.init_source() - utils.db_helper.submit(source_2, 1) +def test_col_process_successfully_deletes_multiple_sources(journalist_app, + test_journo): + # Create two sources with one submission each + source_1, _ = utils.db_helper.init_source() + utils.db_helper.submit(source_1, 1) + source_2, _ = utils.db_helper.init_source() + utils.db_helper.submit(source_2, 1) - self._login_user() + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) form_data = {'cols_selected': [source_1.filesystem_id, source_2.filesystem_id], 
'action': 'delete'} - resp = self.client.post(url_for('col.process'), data=form_data, - follow_redirects=True) + resp = app.post(url_for('col.process'), data=form_data, + follow_redirects=True) + + assert resp.status_code == 200 - self.assert200(resp) + # Verify there are no remaining sources + remaining_sources = Source.query.all() + assert not remaining_sources - # Verify there are no remaining sources - remaining_sources = db_session.query(db.Source).all() - self.assertEqual(len(remaining_sources), 0) - def test_col_process_successfully_stars_sources(self): - source_1, _ = utils.db_helper.init_source() - utils.db_helper.submit(source_1, 1) +def test_col_process_successfully_stars_sources(journalist_app, + test_journo, + test_source): + utils.db_helper.submit(test_source['source'], 1) - self._login_user() + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) - form_data = {'cols_selected': [source_1.filesystem_id], + form_data = {'cols_selected': [test_source['filesystem_id']], 'action': 'star'} - resp = self.client.post(url_for('col.process'), data=form_data, - follow_redirects=True) + resp = app.post(url_for('col.process'), data=form_data, + follow_redirects=True) + assert resp.status_code == 200 - self.assert200(resp) + source = Source.query.get(test_source['id']) + assert source.star.starred - # Verify the source is starred - self.assertTrue(source_1.star.starred) - def test_col_process_successfully_unstars_sources(self): - source_1, _ = utils.db_helper.init_source() - utils.db_helper.submit(source_1, 1) +def test_col_process_successfully_unstars_sources(journalist_app, + test_journo, + test_source): + utils.db_helper.submit(test_source['source'], 1) - self._login_user() + with journalist_app.test_client() as app: + _login_user(app, test_journo['username'], test_journo['password'], + test_journo['otp_secret']) # First star the source - form_data = {'cols_selected': [source_1.filesystem_id], + form_data = {'cols_selected': [test_source['filesystem_id']], 'action': 'star'} - self.client.post(url_for('col.process'), data=form_data, - follow_redirects=True) + app.post(url_for('col.process'), data=form_data, + follow_redirects=True) # Now unstar the source - form_data = {'cols_selected': [source_1.filesystem_id], + form_data = {'cols_selected': [test_source['filesystem_id']], 'action': 'un-star'} - resp = self.client.post(url_for('col.process'), data=form_data, - follow_redirects=True) - - self.assert200(resp) - - # Verify the source is not starred - self.assertFalse(source_1.star.starred) - - -class TestJournalistLocale(TestCase): - - def setUp(self): - utils.env.setup() - - # Patch the two-factor verification to avoid intermittent errors - utils.db_helper.mock_verify_token(self) - - # Setup test user - self.user, self.user_pw = utils.db_helper.init_journalist() - - def tearDown(self): - utils.env.teardown() - - def get_fake_config(self): - class Config: - def __getattr__(self, name): - return getattr(config, name) - return Config() - - # A method required by flask_testing.TestCase - def create_app(self): - fake_config = self.get_fake_config() - fake_config.SUPPORTED_LOCALES = ['en_US', 'fr_FR'] - return journalist_app.create_app(fake_config) + resp = app.post(url_for('col.process'), data=form_data, + follow_redirects=True) - def test_render_locales(self): - """the locales.html template must collect both request.args (l=XX) and - request.view_args (/<filesystem_id>) to build the URL to - change the 
locale
+        assert resp.status_code == 200
-        """
-        source, _ = utils.db_helper.init_source()
-        self._ctx.g.user = self.user
+        source = Source.query.get(test_source['id'])
+        assert not source.star.starred
-        url = url_for('col.col', filesystem_id=source.filesystem_id)
-        resp = self.client.get(url + '?l=fr_FR')
-        self.assertNotIn('?l=fr_FR', resp.data)
-        self.assertIn(url + '?l=en_US', resp.data)
+def test_source_with_null_last_updated(journalist_app,
+                                       test_journo,
+                                       test_files):
+    '''Regression test for issue #3862'''
-class TestJournalistLogin(unittest.TestCase):
+    source = test_files['source']
+    source.last_updated = None
+    db.session.add(source)
+    db.session.commit()
-    def setUp(self):
-        utils.env.setup()
+    with journalist_app.test_client() as app:
+        _login_user(app, test_journo['username'], test_journo['password'],
+                    test_journo['otp_secret'])
+        resp = app.get(url_for('main.index'))
+        assert resp.status_code == 200
-        # Patch the two-factor verification so it always succeeds
-        utils.db_helper.mock_verify_token(self)
-        self.user, self.user_pw = utils.db_helper.init_journalist()
+def test_does_set_cookie_headers(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('main.login'))
-    def tearDown(self):
-        utils.env.teardown()
-        # TODO: figure out why this is necessary here, but unnecessary in all
-        # of the tests in `tests/test_unit_*.py`. Without this, the session
-        # continues to return values even if the underlying database is deleted
-        # (as in `shared_teardown`).
-        db_session.remove()
+        observed_headers = response.headers
+        assert 'Set-Cookie' in list(observed_headers.keys())
+        assert 'Cookie' in observed_headers['Vary']
-    @patch('db.Journalist._scrypt_hash')
-    @patch('db.Journalist.valid_password', return_value=True)
-    def test_valid_login_calls_scrypt(self,
-                                      mock_scrypt_hash,
-                                      mock_valid_password):
-        Journalist.login(self.user.username, self.user_pw, 'mocked')
-        self.assertTrue(
-            mock_scrypt_hash.called,
-            "Failed to call _scrypt_hash for password w/ valid length")
+def test_app_error_handlers_defined(journalist_app):
+    for status_code in [400, 401, 403, 404, 500]:
+        # This will raise KeyError if an app-wide error handler is not defined
+        assert journalist_app.error_handler_spec[None][status_code]
-    @patch('db.Journalist._scrypt_hash')
-    def test_login_with_invalid_password_doesnt_call_scrypt(self,
-                                                            mock_scrypt_hash):
-        invalid_pw = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
-        with self.assertRaises(InvalidPasswordLength):
-            Journalist.login(self.user.username, invalid_pw, 'mocked')
-        self.assertFalse(
-            mock_scrypt_hash.called,
-            "Called _scrypt_hash for password w/ invalid length")
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_journalist_api.py
@@ -0,0 +1,962 @@
+# -*- coding: utf-8 -*-
+import json
+import os
+import random
+
+from pyotp import TOTP
+from uuid import UUID, uuid4
+
+from flask import current_app, url_for
+from itsdangerous import TimedJSONWebSignatureSerializer
+
+from db import db
+from models import Journalist, Reply, Source, SourceStar, Submission, RevokedToken
+
+os.environ['SECUREDROP_ENV'] = 'test'  # noqa
+from .utils.api_helper import get_api_headers
+
+random.seed('◔ ⌣ ◔')
+
+
+def test_unauthenticated_user_gets_all_endpoints(journalist_app):
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.get_endpoints'))
+
+        expected_endpoints = ['current_user_url', 'submissions_url',
+                              'sources_url', 'auth_token_url',
+                              'replies_url']
+        expected_endpoints.sort()
+        sorted_observed_endpoints = list(response.json.keys())
+        sorted_observed_endpoints.sort()
+        assert expected_endpoints == sorted_observed_endpoints
+
+
+def test_valid_user_can_get_an_api_token(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        valid_token = TOTP(test_journo['otp_secret']).now()
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'username': test_journo['username'],
+                                 'passphrase': test_journo['password'],
+                                 'one_time_code': valid_token}),
+                            headers=get_api_headers())
+
+        assert response.json['journalist_uuid'] == test_journo['uuid']
+        assert isinstance(Journalist.validate_api_token_and_get_user(
+            response.json['token']), Journalist) is True
+        assert response.status_code == 200
+        assert response.json['journalist_first_name'] == test_journo['first_name']
+        assert response.json['journalist_last_name'] == test_journo['last_name']
+
+
+def test_user_cannot_get_an_api_token_with_wrong_password(journalist_app,
+                                                          test_journo):
+    with journalist_app.test_client() as app:
+        valid_token = TOTP(test_journo['otp_secret']).now()
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'username': test_journo['username'],
+                                 'passphrase': 'wrong password',
+                                 'one_time_code': valid_token}),
+                            headers=get_api_headers())
+
+        assert response.status_code == 403
+        assert response.json['error'] == 'Forbidden'
+
+
+def test_user_cannot_get_an_api_token_with_wrong_2fa_token(journalist_app,
+                                                           test_journo,
+                                                           hardening):
+    with journalist_app.test_client() as app:
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'username': test_journo['username'],
+                                 'passphrase': test_journo['password'],
+                                 'one_time_code': '123456'}),
+                            headers=get_api_headers())
+
+        assert response.status_code == 403
+        assert response.json['error'] == 'Forbidden'
+
+
+def test_user_cannot_get_an_api_token_with_no_passphrase_field(journalist_app,
+                                                               test_journo):
+    with journalist_app.test_client() as app:
+        valid_token = TOTP(test_journo['otp_secret']).now()
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'username': test_journo['username'],
+                                 'one_time_code': valid_token}),
+                            headers=get_api_headers())
+
+        assert response.status_code == 400
+        assert response.json['error'] == 'Bad Request'
+        assert response.json['message'] == 'passphrase field is missing'
+
+
+def test_user_cannot_get_an_api_token_with_no_username_field(journalist_app,
+                                                             test_journo):
+    with journalist_app.test_client() as app:
+        valid_token = TOTP(test_journo['otp_secret']).now()
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'passphrase': test_journo['password'],
+                                 'one_time_code': valid_token}),
+                            headers=get_api_headers())
+
+        assert response.status_code == 400
+        assert response.json['error'] == 'Bad Request'
+        assert response.json['message'] == 'username field is missing'
+
+
+def test_user_cannot_get_an_api_token_with_no_otp_field(journalist_app,
+                                                        test_journo):
+    with journalist_app.test_client() as app:
+        response = app.post(url_for('api.get_token'),
+                            data=json.dumps(
+                                {'username': test_journo['username'],
+                                 'passphrase': test_journo['password']}),
+                            headers=get_api_headers())
+
+        assert response.status_code == 400
+        assert response.json['error'] == 'Bad Request'
+        assert response.json['message'] == 'one_time_code field is missing'
+
+
+def test_authorized_user_gets_all_sources(journalist_app, test_submissions,
+                                          journalist_api_token):
+    with 
journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_sources'), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # We expect to see our test source in the response + assert test_submissions['source'].journalist_designation == \ + response.json['sources'][0]['journalist_designation'] + + +def test_user_without_token_cannot_get_protected_endpoints(journalist_app, + test_files): + with journalist_app.app_context(): + uuid = test_files['source'].uuid + protected_routes = [ + url_for('api.get_all_sources'), + url_for('api.single_source', source_uuid=uuid), + url_for('api.all_source_submissions', source_uuid=uuid), + url_for('api.single_submission', source_uuid=uuid, + submission_uuid=test_files['submissions'][0].uuid), + url_for('api.download_submission', source_uuid=uuid, + submission_uuid=test_files['submissions'][0].uuid), + url_for('api.get_all_submissions'), + url_for('api.get_all_replies'), + url_for('api.single_reply', source_uuid=uuid, + reply_uuid=test_files['replies'][0].uuid), + url_for('api.all_source_replies', source_uuid=uuid), + url_for('api.get_current_user') + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.get(protected_route, + headers=get_api_headers('')) + + assert response.status_code == 403 + + +def test_user_without_token_cannot_del_protected_endpoints(journalist_app, + test_submissions): + with journalist_app.app_context(): + uuid = test_submissions['source'].uuid + protected_routes = [ + url_for('api.single_source', source_uuid=uuid), + url_for('api.single_submission', source_uuid=uuid, + submission_uuid=test_submissions['submissions'][0].uuid), + url_for('api.remove_star', source_uuid=uuid), + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.delete(protected_route, + headers=get_api_headers('')) + + assert response.status_code == 403 + + +def test_attacker_cannot_create_valid_token_with_none_alg(journalist_app, + test_source, + test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + s = TimedJSONWebSignatureSerializer('not the secret key', + algorithm_name='none') + attacker_token = s.dumps({'id': test_journo['id']}).decode('ascii') + + response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(attacker_token)) + + assert response.status_code == 403 + + +def test_attacker_cannot_use_token_after_admin_deletes(journalist_app, + test_source, + journalist_api_token): + + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + + # In a scenario where an attacker compromises a journalist workstation + # the admin should be able to delete the user and their token should + # no longer be valid. + attacker = Journalist.validate_api_token_and_get_user( + journalist_api_token) + + db.session.delete(attacker) + db.session.commit() + + # Now this token should not be valid. 
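+        # (presumably the API token encodes only the journalist's database id,
+        # so validating it re-queries the Journalist table and fails once the
+        # row has been deleted)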
+ response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 403 + + +def test_user_without_token_cannot_post_protected_endpoints(journalist_app, + test_source): + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.all_source_replies', source_uuid=uuid), + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid) + ] + + with journalist_app.test_client() as app: + for protected_route in protected_routes: + response = app.post(protected_route, + headers=get_api_headers(''), + data=json.dumps({'some': 'stuff'})) + assert response.status_code == 403 + + +def test_api_error_handlers_defined(journalist_app): + """Ensure the expected error handler is defined in the API blueprint""" + for status_code in [400, 401, 403, 404, 500]: + result = journalist_app.error_handler_spec['api'][status_code] + + expected_error_handler = '_handle_api_http_exception' + assert list(result.values())[0].__name__ == expected_error_handler + + +def test_api_error_handler_404(journalist_app, journalist_api_token): + with journalist_app.test_client() as app: + response = app.get('/api/v1/invalidendpoint', + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 404 + assert response.json['error'] == 'Not Found' + + +def test_trailing_slash_cleanly_404s(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.get(url_for('api.single_source', + source_uuid=uuid) + '/', + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 404 + assert response.json['error'] == 'Not Found' + + +def test_authorized_user_gets_single_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + assert response.json['uuid'] == test_source['source'].uuid + assert response.json['key']['fingerprint'] == \ + test_source['source'].fingerprint + assert 'BEGIN PGP PUBLIC KEY' in response.json['key']['public'] + + +def test_get_non_existant_source_404s(journalist_app, journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.single_source', source_uuid=1), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 404 + + +def test_authorized_user_can_flag_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.flag', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Verify that the source was flagged. + assert Source.query.get(source_id).flagged + + +def test_authorized_user_can_star_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 201 + + # Verify that the source was starred. 
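+        # (the star endpoint presumably creates the SourceStar row on first
+        # use; .one() below would raise NoResultFound if no row existed)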
+ assert SourceStar.query.filter( + SourceStar.source_id == source_id).one().starred + + # API should also report is_starred is true + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.json['is_starred'] is True + + +def test_authorized_user_can_unstar_a_source(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + source_id = test_source['source'].id + response = app.post(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 201 + + response = app.delete(url_for('api.remove_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + # Verify that the source is gone. + assert SourceStar.query.filter( + SourceStar.source_id == source_id).one().starred is False + + # API should also report is_starred is false + response = app.get(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.json['is_starred'] is False + + +def test_disallowed_methods_produces_405(journalist_app, test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.delete(url_for('api.add_star', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 405 + assert response.json['error'] == 'Method Not Allowed' + + +def test_authorized_user_can_get_all_submissions(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_submissions'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + observed_submissions = [submission['filename'] for + submission in response.json['submissions']] + + expected_submissions = [submission.filename for + submission in Submission.query.all()] + assert observed_submissions == expected_submissions + + +def test_authorized_user_get_source_submissions(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_submissions['source'].uuid + response = app.get(url_for('api.all_source_submissions', + source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + observed_submissions = [submission['filename'] for + submission in response.json['submissions']] + + expected_submissions = [submission.filename for submission in + test_submissions['source'].submissions] + assert observed_submissions == expected_submissions + + +def test_authorized_user_can_get_single_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + response = app.get(url_for('api.single_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + assert response.json['uuid'] == submission_uuid + assert response.json['is_read'] is False + assert response.json['filename'] == \ + test_submissions['source'].submissions[0].filename + assert response.json['size'] == \ + test_submissions['source'].submissions[0].size + + +def test_authorized_user_can_get_all_replies(journalist_app, 
test_files, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_all_replies'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + observed_replies = [reply['filename'] for + reply in response.json['replies']] + + expected_replies = [reply.filename for + reply in Reply.query.all()] + assert observed_replies == expected_replies + + +def test_authorized_user_get_source_replies(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_files['source'].uuid + response = app.get(url_for('api.all_source_replies', + source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + observed_replies = [reply['filename'] for + reply in response.json['replies']] + + expected_replies = [reply.filename for + reply in test_files['source'].replies] + assert observed_replies == expected_replies + + +def test_authorized_user_can_get_single_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + response = app.get(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + reply = Reply.query.filter(Reply.uuid == reply_uuid).one() + + assert response.json['uuid'] == reply_uuid + assert response.json['journalist_username'] == \ + reply.journalist.username + assert response.json['journalist_uuid'] == \ + reply.journalist.uuid + assert response.json['journalist_first_name'] == \ + reply.journalist.first_name + assert response.json['journalist_last_name'] == \ + reply.journalist.last_name + assert response.json['is_deleted_by_source'] is False + assert response.json['filename'] == \ + test_files['source'].replies[0].filename + assert response.json['size'] == \ + test_files['source'].replies[0].size + + +def test_authorized_user_can_delete_single_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + response = app.delete(url_for('api.single_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Submission now should be gone. + assert Submission.query.filter( + Submission.uuid == submission_uuid).all() == [] + + +def test_authorized_user_can_delete_single_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + response = app.delete(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Reply should now be gone. 
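+        # (.all() == [] checks that no matching row survives; unlike .one(),
+        # an empty result does not raise here)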
+ assert Reply.query.filter(Reply.uuid == reply_uuid).all() == [] + + +def test_authorized_user_can_delete_source_collection(journalist_app, + test_source, + journalist_api_token): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.delete(url_for('api.single_source', source_uuid=uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Source does not exist + assert Source.query.all() == [] + + +def test_authorized_user_can_download_submission(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + submission_uuid = test_submissions['source'].submissions[0].uuid + uuid = test_submissions['source'].uuid + + response = app.get(url_for('api.download_submission', + source_uuid=uuid, + submission_uuid=submission_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Submission should now be marked as downloaded in the database + submission = Submission.query.get( + test_submissions['source'].submissions[0].id) + assert submission.downloaded + + # Response should be a PGP encrypted download + assert response.mimetype == 'application/pgp-encrypted' + + # Response should have Etag field with hash + assert response.headers['ETag'].startswith('sha256:') + + +def test_authorized_user_can_download_reply(journalist_app, test_files, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files['source'].replies[0].uuid + uuid = test_files['source'].uuid + + response = app.get(url_for('api.download_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + # Response should be a PGP encrypted download + assert response.mimetype == 'application/pgp-encrypted' + + # Response should have Etag field with hash + assert response.headers['ETag'].startswith('sha256:') + + +def test_authorized_user_can_get_current_user_endpoint(journalist_app, + test_journo, + journalist_api_token): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 200 + + assert response.json['is_admin'] is False + assert response.json['username'] == test_journo['username'] + assert response.json['uuid'] == test_journo['journalist'].uuid + assert response.json['first_name'] == test_journo['journalist'].first_name + assert response.json['last_name'] == test_journo['journalist'].last_name + + +def test_request_with_missing_auth_header_triggers_403(journalist_app): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }) + assert response.status_code == 403 + + +def test_request_with_auth_header_but_no_token_triggers_403(journalist_app): + with journalist_app.test_client() as app: + response = app.get(url_for('api.get_current_user'), + headers={ + 'Authorization': '', + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }) + assert response.status_code == 403 + + +def test_unencrypted_replies_get_rejected(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + reply_content = 'This is a plaintext reply' + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + 
data=json.dumps({'reply': reply_content}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + source_id = test_source['source'].id + uuid = test_source['source'].uuid + + # First we must encrypt the reply, or it will get rejected + # by the server. + source_key = current_app.crypto_util.getkey( + test_source['source'].filesystem_id) + reply_content = current_app.crypto_util.gpg.encrypt( + 'This is a plaintext reply', source_key).data + + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data=json.dumps({'reply': reply_content.decode('utf-8')}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 201 + + # ensure the uuid is present and valid + reply_uuid = UUID(response.json['uuid']) + + # check that the uuid has a matching db object + reply = Reply.query.filter_by(uuid=str(reply_uuid)).one_or_none() + assert reply is not None + + # check that the filename is present and correct (#4047) + assert response.json['filename'] == reply.filename + + with journalist_app.app_context(): # Now verify everything was saved. + assert reply.journalist_id == test_journo['id'] + assert reply.source_id == source_id + + # regression test for #3918 + assert '/' not in reply.filename + + source = Source.query.get(source_id) + + expected_filename = '{}-{}-reply.gpg'.format( + source.interaction_count, source.journalist_filename) + + expected_filepath = current_app.storage.path( + source.filesystem_id, expected_filename) + + with open(expected_filepath, 'rb') as fh: + saved_content = fh.read() + + assert reply_content == saved_content + + +def test_reply_without_content_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data=json.dumps({'reply': ''}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_without_reply_field_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data=json.dumps({'other': 'stuff'}), + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_without_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data='invalid', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + +def test_reply_with_valid_curly_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = test_source['source'].uuid + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data='{}', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + assert response.json['message'] == 'reply not found in request body' + + +def test_reply_with_valid_square_json_400(journalist_app, journalist_api_token, + test_source, test_journo): + with journalist_app.test_client() as app: + uuid = 
test_source['source'].uuid + response = app.post(url_for('api.all_source_replies', + source_uuid=uuid), + data='[]', + headers=get_api_headers(journalist_api_token)) + assert response.status_code == 400 + + assert response.json['message'] == 'reply not found in request body' + + +def test_malformed_json_400(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.get_token'), + url_for('api.all_source_replies', source_uuid=uuid), + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="{this is invalid {json!", + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 400 + assert response.json['error'] == 'Bad Request' + + +def test_empty_json_400(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.get_token'), + url_for('api.all_source_replies', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="", + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 400 + assert response.json['error'] == 'Bad Request' + + +def test_empty_json_20X(journalist_app, journalist_api_token, test_journo, + test_source): + + with journalist_app.app_context(): + uuid = test_source['source'].uuid + protected_routes = [ + url_for('api.add_star', source_uuid=uuid), + url_for('api.flag', source_uuid=uuid), + ] + with journalist_app.test_client() as app: + for protected_route in protected_routes: + + response = app.post(protected_route, + data="", + headers=get_api_headers(journalist_api_token)) + + assert response.status_code in (200, 201) + + +def test_set_reply_uuid(journalist_app, journalist_api_token, test_source): + msg = '-----BEGIN PGP MESSAGE-----\nwat\n-----END PGP MESSAGE-----' + reply_uuid = str(uuid4()) + req_data = {'uuid': reply_uuid, 'reply': msg} + + with journalist_app.test_client() as app: + # first check that we can set a valid UUID + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 201 + assert resp.json['uuid'] == reply_uuid + + reply = Reply.query.filter_by(uuid=reply_uuid).one_or_none() + assert reply is not None + + len_of_replies = len(Source.query.get(test_source['id']).replies) + + # next check that requesting with the same UUID does not succeed + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 409 + + new_len_of_replies = len(Source.query.get(test_source['id']).replies) + + assert new_len_of_replies == len_of_replies + + # check setting null for the uuid field doesn't break + req_data['uuid'] = None + source_uuid = test_source['uuid'] + resp = app.post(url_for('api.all_source_replies', + source_uuid=source_uuid), + data=json.dumps(req_data), + headers=get_api_headers(journalist_api_token)) + assert resp.status_code == 201 + + new_uuid = resp.json['uuid'] + reply = 
Reply.query.filter_by(uuid=new_uuid).one_or_none()
+        assert reply is not None
+
+        # check setting invalid values for the uuid field doesn't break
+        req_data['uuid'] = 'not a uuid'
+        source_uuid = test_source['uuid']
+        resp = app.post(url_for('api.all_source_replies',
+                                source_uuid=source_uuid),
+                        data=json.dumps(req_data),
+                        headers=get_api_headers(journalist_api_token))
+        assert resp.status_code == 400
+
+
+def test_api_does_not_set_cookie_headers(journalist_app, test_journo):
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.get_endpoints'))
+
+        observed_headers = response.headers
+        assert 'Set-Cookie' not in list(observed_headers.keys())
+        if 'Vary' in list(observed_headers.keys()):
+            assert 'Cookie' not in observed_headers['Vary']
+
+
+# regression test for #4053
+def test_malformed_auth_token(journalist_app, journalist_api_token):
+    with journalist_app.app_context():
+        # we know this endpoint requires an auth header
+        url = url_for('api.get_all_sources')
+
+    with journalist_app.test_client() as app:
+        # precondition to ensure token is even valid
+        resp = app.get(url, headers={'Authorization': 'Token {}'.format(journalist_api_token)})
+        assert resp.status_code == 200
+
+        resp = app.get(url, headers={'Authorization': 'not-token {}'.format(journalist_api_token)})
+        assert resp.status_code == 403
+
+        resp = app.get(url, headers={'Authorization': journalist_api_token})
+        assert resp.status_code == 403
+
+        resp = app.get(url, headers={'Authorization': 'too many {}'.format(journalist_api_token)})
+        assert resp.status_code == 403
+
+
+def test_submission_download_generates_checksum(journalist_app,
+                                                journalist_api_token,
+                                                test_source,
+                                                test_submissions,
+                                                mocker):
+    submission = test_submissions['submissions'][0]
+    assert submission.checksum is None  # precondition
+
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.download_submission',
+                                   source_uuid=test_source['uuid'],
+                                   submission_uuid=submission.uuid),
+                           headers=get_api_headers(journalist_api_token))
+        assert response.status_code == 200
+        assert response.headers['ETag']
+
+    # check that the submission checksum was added
+    fetched_submission = Submission.query.get(submission.id)
+    assert fetched_submission.checksum
+
+    mock_add_checksum = mocker.patch('journalist_app.utils.add_checksum_for_file')
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.download_submission',
+                                   source_uuid=test_source['uuid'],
+                                   submission_uuid=submission.uuid),
+                           headers=get_api_headers(journalist_api_token))
+        assert response.status_code == 200
+        assert response.headers['ETag']
+
+    fetched_submission = Submission.query.get(submission.id)
+    assert fetched_submission.checksum
+    # we don't want to recalculate this value
+    assert not mock_add_checksum.called
+
+
+def test_reply_download_generates_checksum(journalist_app,
+                                           journalist_api_token,
+                                           test_source,
+                                           test_files,
+                                           mocker):
+    reply = test_files['replies'][0]
+    assert reply.checksum is None  # precondition
+
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.download_reply',
+                                   source_uuid=test_source['uuid'],
+                                   reply_uuid=reply.uuid),
+                           headers=get_api_headers(journalist_api_token))
+        assert response.status_code == 200
+        assert response.headers['ETag']
+
+    # check that the reply checksum was added
+    fetched_reply = Reply.query.get(reply.id)
+    assert fetched_reply.checksum
+
+    mock_add_checksum = mocker.patch('journalist_app.utils.add_checksum_for_file')
+    with journalist_app.test_client() as app:
+        response = app.get(url_for('api.download_reply',
+                                   source_uuid=test_source['uuid'],
+                                   reply_uuid=reply.uuid),
+                           headers=get_api_headers(journalist_api_token))
+        assert response.status_code == 200
+        assert response.headers['ETag']
+
+    fetched_reply = Reply.query.get(reply.id)
+    assert fetched_reply.checksum
+    # we don't want to recalculate this value
+    assert not mock_add_checksum.called
+
+
+def test_revoke_token(journalist_app, test_journo, journalist_api_token):
+    with journalist_app.test_client() as app:
+        # without token 403's
+        resp = app.post(url_for('api.logout'))
+        assert resp.status_code == 403
+
+        resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token))
+        assert resp.status_code == 200
+
+        revoked_token = RevokedToken.query.filter_by(token=journalist_api_token).one()
+        assert revoked_token.journalist_id == test_journo['id']
+
+        resp = app.get(url_for('api.get_all_sources'),
+                       headers=get_api_headers(journalist_api_token))
+        assert resp.status_code == 403
diff --git a/securedrop/tests/test_journalist_utils.py b/securedrop/tests/test_journalist_utils.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_journalist_utils.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+from flask import url_for
+import os
+import pytest
+import random
+
+from models import RevokedToken
+from sqlalchemy.orm.exc import NoResultFound
+
+from journalist_app.utils import cleanup_expired_revoked_tokens
+
+os.environ['SECUREDROP_ENV'] = 'test'  # noqa
+from .utils.api_helper import get_api_headers
+
+random.seed('◔ ⌣ ◔')
+
+
+def test_revoke_token_cleanup_does_not_delete_tokens_if_not_expired(journalist_app, test_journo,
+                                                                    journalist_api_token):
+    with journalist_app.test_client() as app:
+        resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token))
+        assert resp.status_code == 200
+
+        cleanup_expired_revoked_tokens()
+
+        revoked_token = RevokedToken.query.filter_by(token=journalist_api_token).one()
+        assert revoked_token.journalist_id == test_journo['id']
+
+
+def test_revoke_token_cleanup_does_deletes_tokens_that_are_expired(journalist_app, test_journo,
+                                                                   journalist_api_token, mocker):
+    with journalist_app.test_client() as app:
+        resp = app.post(url_for('api.logout'), headers=get_api_headers(journalist_api_token))
+        assert resp.status_code == 200
+
+        # Mock response from expired token method when token is expired
+        mocker.patch('journalist_app.admin.Journalist.validate_token_is_not_expired_or_invalid',
+                     return_value=None)
+        cleanup_expired_revoked_tokens()
+
+        with pytest.raises(NoResultFound):
+            RevokedToken.query.filter_by(token=journalist_api_token).one()
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -1,401 +1,235 @@
 # -*- coding: utf-8 -*-
 import argparse
-import os
-from os.path import abspath, dirname, exists, getmtime, join, realpath
-os.environ['SECUREDROP_ENV'] = 'test'  # noqa
-import config
+import datetime
+import io
 import logging
+import os
+import time
+
 import manage
 import mock
-import pytest
-from sqlalchemy.orm.exc import NoResultFound
-from StringIO import StringIO
-import shutil
-import subprocess
-import sys
-import time
-import unittest
-import version
-import utils
+from management import submissions
+from models import Journalist, db
+
+from .utils import db_helper
 
-from db import Journalist, db_session
+os.environ['SECUREDROP_ENV'] = 'test'  # noqa
 
 YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 
23 2e ce fc dc', 'cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7'] -class TestManagePy(object): - def test_parse_args(self): - # just test that the arg parser is stable - manage.get_args() +def test_parse_args(): + # just test that the arg parser is stable + manage.get_args() - def test_not_verbose(self, caplog): - args = manage.get_args().parse_args(['run']) - manage.setup_verbosity(args) - manage.log.debug('INVISIBLE') - assert 'INVISIBLE' not in caplog.text - def test_verbose(self, caplog): - args = manage.get_args().parse_args(['--verbose', 'run']) - manage.setup_verbosity(args) - manage.log.debug('VISIBLE') - assert 'VISIBLE' in caplog.text +def test_not_verbose(caplog): + args = manage.get_args().parse_args(['run']) + manage.setup_verbosity(args) + manage.log.debug('INVISIBLE') + assert 'INVISIBLE' not in caplog.text -class TestManagementCommand(unittest.TestCase): - def setUp(self): - utils.env.setup() +def test_verbose(caplog): + args = manage.get_args().parse_args(['--verbose', 'run']) + manage.setup_verbosity(args) + manage.log.debug('VISIBLE') + assert 'VISIBLE' in caplog.text - def tearDown(self): - utils.env.teardown() - @mock.patch("__builtin__.raw_input", return_value='jen') - def test_get_username_success(self, mock_stdin): +def test_get_username_success(): + with mock.patch("manage.obtain_input", return_value='jen'): assert manage._get_username() == 'jen' - @mock.patch("__builtin__.raw_input", - side_effect=['a' * (Journalist.MIN_USERNAME_LEN - 1), 'jen']) - def test_get_username_fail(self, mock_stdin): + +def test_get_username_fail(): + bad_username = 'a' * (Journalist.MIN_USERNAME_LEN - 1) + with mock.patch("manage.obtain_input", + side_effect=[bad_username, 'jen']): assert manage._get_username() == 'jen' - @mock.patch("__builtin__.raw_input", return_value='y') - def test_get_yubikey_usage_yes(self, mock_stdin): + +def test_get_yubikey_usage_yes(): + with mock.patch("manage.obtain_input", return_value='y'): assert manage._get_yubikey_usage() - @mock.patch("__builtin__.raw_input", return_value='n') - def test_get_yubikey_usage_no(self, mock_stdin): + +def test_get_yubikey_usage_no(): + with mock.patch("manage.obtain_input", return_value='n'): assert not manage._get_yubikey_usage() - @mock.patch("manage._get_username", return_value='ntoll') - @mock.patch("manage._get_yubikey_usage", return_value=True) - @mock.patch("__builtin__.raw_input", side_effect=YUBIKEY_HOTP) - @mock.patch("sys.stdout", new_callable=StringIO) - def test_handle_invalid_secret(self, mock_username, mock_yubikey, - mock_htop, mock_stdout): - """Regression test for bad secret logic in manage.py""" +# Note: we use the `journalist_app` fixture because it creates the DB +def test_handle_invalid_secret(journalist_app, config, mocker, capsys): + """Regression test for bad secret logic in manage.py""" + + mocker.patch("manage._get_username", return_value='ntoll'), + mocker.patch("manage._get_first_name", return_value=''), + mocker.patch("manage._get_last_name", return_value=''), + mocker.patch("manage._get_yubikey_usage", return_value=True), + mocker.patch("manage.obtain_input", side_effect=YUBIKEY_HOTP), + + with journalist_app.app_context() as context: # We will try to provide one invalid and one valid secret - return_value = manage._add_user() - self.assertEqual(return_value, 0) - self.assertIn('Try again.', sys.stdout.getvalue()) - self.assertIn('successfully added', sys.stdout.getvalue()) - - @mock.patch("manage._get_username", return_value='foo-bar-baz') - 
@mock.patch("manage._get_yubikey_usage", return_value=False) - @mock.patch("sys.stdout", new_callable=StringIO) - def test_exception_handling_when_duplicate_username(self, - mock_username, - mock_yubikey, - mock_stdout): - """Regression test for duplicate username logic in manage.py""" + return_value = manage._add_user(context=context) + out, err = capsys.readouterr() + assert return_value == 0 + assert 'Try again.' in out + assert 'successfully added' in out + + +# Note: we use the `journalist_app` fixture because it creates the DB +def test_exception_handling_when_duplicate_username(journalist_app, + config, + mocker, capsys): + """Regression test for duplicate username logic in manage.py""" + + mocker.patch("manage._get_username", return_value='foo-bar-baz') + mocker.patch("manage._get_first_name", return_value='') + mocker.patch("manage._get_last_name", return_value='') + mocker.patch("manage._get_yubikey_usage", return_value=False) + + with journalist_app.app_context() as context: # Inserting the user for the first time should succeed - return_value = manage._add_user() - self.assertEqual(return_value, 0) - self.assertIn('successfully added', sys.stdout.getvalue()) + return_value = manage._add_user(context=context) + out, err = capsys.readouterr() + + assert return_value == 0 + assert 'successfully added' in out # Inserting the user for a second time should fail return_value = manage._add_user() - self.assertEqual(return_value, 1) - self.assertIn('ERROR: That username is already taken!', - sys.stdout.getvalue()) - - @mock.patch("manage._get_username", return_value='test-user-56789') - @mock.patch("manage._get_yubikey_usage", return_value=False) - @mock.patch("manage._get_username_to_delete", - return_value='test-user-56789') - @mock.patch('manage._get_delete_confirmation', return_value=True) - def test_delete_user(self, - mock_username, - mock_yubikey, - mock_user_to_delete, - mock_user_del_confirm): - return_value = manage._add_user() - self.assertEqual(return_value, 0) + out, err = capsys.readouterr() + assert return_value == 1 + assert 'ERROR: That username is already taken!' 
in out + + +# Note: we use the `journalist_app` fixture because it creates the DB +def test_delete_user(journalist_app, config, mocker): + mocker.patch("manage._get_username", return_value='test-user-56789') + mocker.patch("manage._get_first_name", return_value='') + mocker.patch("manage._get_last_name", return_value='') + mocker.patch("manage._get_yubikey_usage", return_value=False) + mocker.patch("manage._get_username_to_delete", + return_value='test-user-56789') + mocker.patch('manage._get_delete_confirmation', return_value=True) + + with journalist_app.app_context() as context: + return_value = manage._add_user(context=context) + assert return_value == 0 return_value = manage.delete_user(args=None) - self.assertEqual(return_value, 0) - - @mock.patch("manage._get_username_to_delete", - return_value='does-not-exist') - @mock.patch('manage._get_delete_confirmation', return_value=True) - @mock.patch("sys.stdout", new_callable=StringIO) - def test_delete_non_existent_user(self, - mock_user_to_delete, - mock_user_del_confirm, - mock_stdout): - return_value = manage.delete_user(args=None) - self.assertEqual(return_value, 0) - self.assertIn('ERROR: That user was not found!', - sys.stdout.getvalue()) - - @mock.patch("__builtin__.raw_input", return_value='test-user-12345') - def test_get_username_to_delete(self, mock_username): - return_value = manage._get_username_to_delete() - self.assertEqual(return_value, 'test-user-12345') - - def test_reset(self): - test_journalist, _ = utils.db_helper.init_journalist() - user_should_be_gone = test_journalist.username - - return_value = manage.reset(args=None) - - self.assertEqual(return_value, 0) - assert os.path.exists(config.DATABASE_FILE) - assert os.path.exists(config.STORE_DIR) - - # Verify journalist user present in the database is gone - db_session.remove() # Close session and get a session on the new db - with self.assertRaises(NoResultFound): - Journalist.query.filter_by(username=user_should_be_gone).one() - - -class TestManage(object): - - def setup(self): - self.dir = abspath(dirname(realpath(__file__))) - utils.env.setup() - - def teardown(self): - utils.env.teardown() - - @mock.patch("__builtin__.raw_input", return_value='foo-bar-baz') - def test_get_username(self, mock_get_usernam): - assert manage._get_username() == 'foo-bar-baz' - - def test_translate_desktop_l10n(self): - in_files = {} - for what in ('source', 'journalist'): - in_files[what] = join(config.TEMP_DIR, what + '.desktop.in') - shutil.copy(join(self.dir, 'i18n/' + what + '.desktop.in'), - in_files[what]) - kwargs = { - 'translations_dir': config.TEMP_DIR, - 'source': [in_files['source']], - 'extract_update': True, - 'compile': False, - 'verbose': logging.DEBUG, - 'version': version.__version__, - } - args = argparse.Namespace(**kwargs) - manage.setup_verbosity(args) - manage.translate_desktop(args) - messages_file = join(config.TEMP_DIR, 'desktop.pot') - assert exists(messages_file) - pot = open(messages_file).read() - assert 'SecureDrop Source Interfaces' in pot - # pretend this happened a few seconds ago - few_seconds_ago = time.time() - 60 - os.utime(messages_file, (few_seconds_ago, few_seconds_ago)) - - i18n_file = join(config.TEMP_DIR, 'source.desktop') - - # - # Extract+update but do not compile - # - kwargs['source'] = in_files.values() - old_messages_mtime = getmtime(messages_file) - assert not exists(i18n_file) - manage.translate_desktop(args) - assert not exists(i18n_file) - current_messages_mtime = getmtime(messages_file) - assert old_messages_mtime < 
current_messages_mtime - - locale = 'fr_FR' - po_file = join(config.TEMP_DIR, locale + ".po") - manage.sh(""" - msginit --no-translator \ - --locale {locale} \ - --output {po_file} \ - --input {messages_file} - sed -i -e '/{source}/,+1s/msgstr ""/msgstr "SOURCE FR"/' \ - {po_file} - """.format(source='SecureDrop Source Interfaces', - messages_file=messages_file, - po_file=po_file, - locale=locale)) - assert exists(po_file) - - # - # Compile but do not extract+update - # - kwargs['source'] = in_files.values() + ['BOOM'] - kwargs['extract_update'] = False - kwargs['compile'] = True - args = argparse.Namespace(**kwargs) - old_messages_mtime = current_messages_mtime - manage.translate_desktop(args) - assert old_messages_mtime == getmtime(messages_file) - po = open(po_file).read() - assert 'SecureDrop Source Interfaces' in po - assert 'SecureDrop Journalist Interfaces' not in po - i18n = open(i18n_file).read() - assert 'SOURCE FR' in i18n - - def test_translate_messages_l10n(self): - source = [ - join(self.dir, 'i18n/code.py'), - join(self.dir, 'i18n/template.html'), - ] - kwargs = { - 'translations_dir': config.TEMP_DIR, - 'mapping': join(self.dir, 'i18n/babel.cfg'), - 'source': source, - 'extract_update': True, - 'compile': True, - 'verbose': logging.DEBUG, - 'version': version.__version__, - } - args = argparse.Namespace(**kwargs) - manage.setup_verbosity(args) - manage.translate_messages(args) - messages_file = join(config.TEMP_DIR, 'messages.pot') - assert exists(messages_file) - pot = open(messages_file).read() - assert 'code hello i18n' in pot - assert 'template hello i18n' in pot - - locale = 'en_US' - locale_dir = join(config.TEMP_DIR, locale) - manage.sh("pybabel init -i {} -d {} -l {}".format( - messages_file, - config.TEMP_DIR, - locale, - )) - mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo') - assert not exists(mo_file) - manage.translate_messages(args) - assert exists(mo_file) - mo = open(mo_file).read() - assert 'code hello i18n' in mo - assert 'template hello i18n' in mo - - def test_translate_messages_compile_arg(self): - source = [ - join(self.dir, 'i18n/code.py'), - ] - kwargs = { - 'translations_dir': config.TEMP_DIR, - 'mapping': join(self.dir, 'i18n/babel.cfg'), - 'source': source, - 'extract_update': True, - 'compile': False, - 'verbose': logging.DEBUG, - 'version': version.__version__, - } - args = argparse.Namespace(**kwargs) - manage.setup_verbosity(args) - manage.translate_messages(args) - messages_file = join(config.TEMP_DIR, 'messages.pot') - assert exists(messages_file) - pot = open(messages_file).read() - assert 'code hello i18n' in pot - - locale = 'en_US' - locale_dir = join(config.TEMP_DIR, locale) - po_file = join(locale_dir, 'LC_MESSAGES/messages.po') - manage.sh("pybabel init -i {} -d {} -l {}".format( - messages_file, - config.TEMP_DIR, - locale, - )) - assert exists(po_file) - # pretend this happened a few seconds ago - few_seconds_ago = time.time() - 60 - os.utime(po_file, (few_seconds_ago, few_seconds_ago)) - - mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo') - - # - # Extract+update but do not compile - # - old_po_mtime = getmtime(po_file) - assert not exists(mo_file) - manage.translate_messages(args) - assert not exists(mo_file) - current_po_mtime = getmtime(po_file) - assert old_po_mtime < current_po_mtime - - # - # Compile but do not extract+update - # - source = [ - join(self.dir, 'i18n/code.py'), - join(self.dir, 'i18n/template.html'), - ] - kwargs['extract_update'] = False - kwargs['compile'] = True - args = argparse.Namespace(**kwargs) 
- old_po_mtime = current_po_mtime - manage.translate_messages(args) - assert old_po_mtime == getmtime(po_file) - mo = open(mo_file).read() - assert 'code hello i18n' in mo - assert 'template hello i18n' not in mo - - def test_clean_tmp_do_nothing(self, caplog): - args = argparse.Namespace(days=0, - directory=' UNLIKELY ', - verbose=logging.DEBUG) - manage.setup_verbosity(args) - manage.clean_tmp(args) - assert 'does not exist, do nothing' in caplog.text - - def test_clean_tmp_too_young(self, caplog): - args = argparse.Namespace(days=24*60*60, - directory=config.TEMP_DIR, - verbose=logging.DEBUG) - open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close() - manage.setup_verbosity(args) - manage.clean_tmp(args) - assert 'modified less than' in caplog.text - - def test_clean_tmp_removed(self, caplog): - args = argparse.Namespace(days=0, - directory=config.TEMP_DIR, - verbose=logging.DEBUG) - fname = os.path.join(config.TEMP_DIR, 'FILE') - with open(fname, 'a'): - old = time.time() - 24*60*60 - os.utime(fname, (old, old)) - manage.setup_verbosity(args) - manage.clean_tmp(args) - assert 'FILE removed' in caplog.text - - -class TestSh(object): - - def test_sh(self): - assert 'A' == manage.sh("echo -n A") - with pytest.raises(Exception) as excinfo: - manage.sh("exit 123") - assert excinfo.value.returncode == 123 - - def test_sh_progress(self, caplog): - manage.sh("echo AB ; sleep 5 ; echo C") - records = caplog.records - assert ':sh: ' in records[0].message - assert records[0].levelname == 'DEBUG' - assert 'AB' == records[1].message - assert records[1].levelname == 'DEBUG' - assert 'C' == records[2].message - assert records[2].levelname == 'DEBUG' - - def test_sh_input(self, caplog): - assert 'abc' == manage.sh("cat", 'abc') - - def test_sh_fail(self, caplog): - level = manage.log.getEffectiveLevel() - manage.log.setLevel(logging.INFO) - assert manage.log.getEffectiveLevel() == logging.INFO - with pytest.raises(subprocess.CalledProcessError) as excinfo: - manage.sh("echo AB ; echo C ; exit 111") - manage.log.setLevel(level) - assert excinfo.value.returncode == 111 - records = caplog.records - assert 'AB' == records[0].message - assert records[0].levelname == 'ERROR' - assert 'C' == records[1].message - assert records[1].levelname == 'ERROR' + assert return_value == 0 + + +# Note: we use the `journalist_app` fixture because it creates the DB +def test_delete_non_existent_user(journalist_app, config, mocker, capsys): + mocker.patch("manage._get_username_to_delete", + return_value='does-not-exist') + mocker.patch('manage._get_delete_confirmation', return_value=True) + + with journalist_app.app_context() as context: + return_value = manage.delete_user(args=None, context=context) + out, err = capsys.readouterr() + assert return_value == 0 + assert 'ERROR: That user was not found!' 
in out + + +def test_get_username_to_delete(mocker): + mocker.patch("manage.obtain_input", return_value='test-user-12345') + return_value = manage._get_username_to_delete() + assert return_value == 'test-user-12345' + + +def test_reset(journalist_app, test_journo, alembic_config, config): + original_config = manage.config + try: + # We need to override the config to point at the per-test DB + manage.config = config + with journalist_app.app_context() as context: + # Override the hardcoded alembic.ini value + manage.config.TEST_ALEMBIC_INI = alembic_config + + args = argparse.Namespace(store_dir=config.STORE_DIR) + return_value = manage.reset(args=args, context=context) + + assert return_value == 0 + assert os.path.exists(config.DATABASE_FILE) + assert os.path.exists(config.STORE_DIR) + + # Verify journalist user present in the database is gone + res = Journalist.query.filter_by(username=test_journo['username']).one_or_none() + assert res is None + finally: + manage.config = original_config + + +def test_get_username(mocker): + mocker.patch("manage.obtain_input", return_value='foo-bar-baz') + assert manage._get_username() == 'foo-bar-baz' + + +def test_get_first_name(mocker): + mocker.patch("manage.obtain_input", return_value='foo-bar-baz') + assert manage._get_first_name() == 'foo-bar-baz' + + +def test_get_last_name(mocker): + mocker.patch("manage.obtain_input", return_value='foo-bar-baz') + assert manage._get_last_name() == 'foo-bar-baz' + + +def test_clean_tmp_do_nothing(caplog): + args = argparse.Namespace(days=0, + directory=' UNLIKELY::::::::::::::::: ', + verbose=logging.DEBUG) + manage.setup_verbosity(args) + manage.clean_tmp(args) + assert 'does not exist, do nothing' in caplog.text + + +def test_clean_tmp_too_young(config, caplog): + args = argparse.Namespace(days=24*60*60, + directory=config.TEMP_DIR, + verbose=logging.DEBUG) + # create a file + io.open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close() + + manage.setup_verbosity(args) + manage.clean_tmp(args) + assert 'modified less than' in caplog.text + + +def test_clean_tmp_removed(config, caplog): + args = argparse.Namespace(days=0, + directory=config.TEMP_DIR, + verbose=logging.DEBUG) + fname = os.path.join(config.TEMP_DIR, 'FILE') + with io.open(fname, 'a'): + old = time.time() - 24*60*60 + os.utime(fname, (old, old)) + manage.setup_verbosity(args) + manage.clean_tmp(args) + assert 'FILE removed' in caplog.text + + +def test_were_there_submissions_today(source_app, config): + with source_app.app_context() as context: + # We need to override the config to point at the per-test DB + data_root = config.SECUREDROP_DATA_ROOT + args = argparse.Namespace(data_root=data_root, verbose=logging.DEBUG) + + count_file = os.path.join(data_root, 'submissions_today.txt') + source, codename = db_helper.init_source_without_keypair() + source.last_updated = (datetime.datetime.utcnow() - datetime.timedelta(hours=24*2)) + db.session.commit() + submissions.were_there_submissions_today(args, context) + assert io.open(count_file).read() == "0" + source.last_updated = datetime.datetime.utcnow() + db.session.commit() + submissions.were_there_submissions_today(args, context) + assert io.open(count_file).read() == "1" diff --git a/securedrop/tests/test_qa_loader.py b/securedrop/tests/test_qa_loader.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_qa_loader.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- + +from qa_loader import QaLoader + + +def test_load_data(journalist_app, config): + # Use the journalist_app fixture to init the DB + 
QaLoader(config, multiplier=1).load() diff --git a/securedrop/tests/test_rm.py b/securedrop/tests/test_rm.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_rm.py @@ -0,0 +1,99 @@ +""" +Test secure deletion utilities in securedrop/rm.py +""" +import os + +import pytest + +import rm + + +def test_secure_delete_capability(config): + assert rm.check_secure_delete_capability() is True + + path = os.environ["PATH"] + try: + os.environ["PATH"] = "{}:{}".format("/bin", config.TEMP_DIR) + assert rm.check_secure_delete_capability() is False + fakeshred = os.path.join(config.TEMP_DIR, "shred") + with open(fakeshred, "w") as f: + f.write("#!/bin/bash\nexit1\n") + os.chmod(fakeshred, 0o700) + assert rm.check_secure_delete_capability() is False + finally: + os.environ["PATH"] = path + + +def test_shred(config): + testfile = "test_shred.txt" + content = "abc123\n" + + # non-existent target should raise an exception + with pytest.raises(EnvironmentError): + rm.shred(os.path.abspath(os.path.join(config.TEMP_DIR, "nonexistentshredtarget"))) + + # a non-file target should raise an exception + d = os.path.abspath(os.path.join(config.TEMP_DIR, "nonexistentshredtarget")) + os.makedirs(d) + with pytest.raises(ValueError): + rm.shred(d) + os.rmdir(d) + + with open(testfile, "w") as f: + f.write(content) + + with open(testfile) as f: + read_content = f.read() + assert read_content == content + + # Shred without deleting, so we can check the new content + rm.shred(testfile, delete=False) + + with open(testfile) as f: + read_content = f.read() + assert read_content != content + + # Shred and delete + rm.shred(testfile) + assert os.path.exists(testfile) is False + + +def test_secure_delete(config): + content = "abc123\n" + testfile = "test_shred.txt" + + # Shred a file + testfile1 = os.path.abspath(os.path.join(config.TEMP_DIR, testfile)) + with open(testfile1, "w") as f: + f.write(content) + + assert os.path.exists(testfile1) + rm.secure_delete(testfile1) + assert os.path.exists(testfile1) is False + + # Shred a directory + testdir = os.path.abspath(os.path.join(config.TEMP_DIR, "shredtest1")) + testsubdir1 = os.path.abspath(os.path.join(testdir, "shredtest1.1")) + testsubdir2 = os.path.abspath(os.path.join(testdir, "shredtest1.2")) + + os.makedirs(testsubdir1) + os.makedirs(testsubdir2) + + testfile1 = os.path.abspath(os.path.join(testdir, testfile)) + with open(testfile1, "w") as f: + f.write(content) + + testfile2 = os.path.abspath(os.path.join(testsubdir1, testfile)) + with open(testfile2, "w") as f: + f.write(content) + + assert os.path.exists(testfile1) + assert os.path.exists(testfile2) + + rm.secure_delete(testdir) + + assert os.path.exists(testfile1) is False + assert os.path.exists(testfile2) is False + assert os.path.exists(testsubdir1) is False + assert os.path.exists(testsubdir2) is False + assert os.path.exists(testdir) is False diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py --- a/securedrop/tests/test_secure_tempfile.py +++ b/securedrop/tests/test_secure_tempfile.py @@ -1,98 +1,110 @@ # -*- coding: utf-8 -*- +import io import os -import unittest +import pytest -from gnupg._util import _is_stream +from pretty_bad_protocol._util import _is_stream os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config -import secure_tempfile -import utils +from secure_tempfile import SecureTemporaryFile +MESSAGE = '410,757,864,530' -class TestSecureTempfile(unittest.TestCase): - def setUp(self): - utils.env.setup() - self.f = 
secure_tempfile.SecureTemporaryFile(config.STORE_DIR) - self.msg = '410,757,864,530' - def tearDown(self): - utils.env.teardown() +def test_read_before_writing(): + f = SecureTemporaryFile('/tmp') + with pytest.raises(AssertionError) as err: + f.read() + assert 'You must write before reading!' in str(err) - def test_read_before_writing(self): - with self.assertRaisesRegexp(AssertionError, - 'You must write before reading!'): - self.f.read() - def test_write_then_read_once(self): - self.f.write(self.msg) +def test_write_then_read_once(): + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + assert f.read().decode('utf-8') == MESSAGE - self.assertEqual(self.f.read(), self.msg) - def test_write_twice_then_read_once(self): - self.f.write(self.msg) - self.f.write(self.msg) +def test_write_twice_then_read_once(): + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + f.write(MESSAGE) + assert f.read().decode('utf-8') == MESSAGE * 2 - self.assertEqual(self.f.read(), self.msg*2) - def test_write_then_read_twice(self): - self.f.write(self.msg) +def test_write_then_read_twice(): + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + assert f.read().decode('utf-8') == MESSAGE + assert f.read() == b'' - self.assertEqual(self.f.read(), self.msg) - self.assertEqual(self.f.read(), '') - def test_write_then_read_then_write(self): - self.f.write(self.msg) - self.f.read() +def test_write_then_read_then_write(): + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + f.read() - with self.assertRaisesRegexp(AssertionError, - 'You cannot write after reading!'): - self.f.write('BORN TO DIE') + with pytest.raises(AssertionError) as err: + f.write('be gentle to each other so we can be dangerous together') + assert 'You cannot write after reading!' in str(err) - def test_read_write_unicode(self): - unicode_msg = u'鬼神 Kill Em All 1989' - self.f.write(unicode_msg) - self.assertEqual(self.f.read().decode('utf-8'), unicode_msg) +def test_read_write_unicode(): + f = SecureTemporaryFile('/tmp') + unicode_msg = '鬼神 Kill Em All 1989' + f.write(unicode_msg) + assert f.read().decode('utf-8') == unicode_msg - def test_file_seems_encrypted(self): - self.f.write(self.msg) - with open(self.f.filepath, 'rb') as fh: - contents = fh.read().decode() - self.assertNotIn(self.msg, contents) +def test_file_seems_encrypted(): + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + with io.open(f.filepath, 'rb') as fh: + contents = fh.read() - def test_file_is_removed_from_disk(self): - fp = self.f.filepath - self.f.write(self.msg) - self.f.read() + assert MESSAGE.encode('utf-8') not in contents + assert MESSAGE not in contents.decode() - self.assertTrue(os.path.exists(fp)) - self.f.close() +def test_file_is_removed_from_disk(): + # once without reading the contents + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + assert os.path.exists(f.filepath) + f.close() + assert not os.path.exists(f.filepath) - self.assertFalse(os.path.exists(fp)) + # once with reading the contents + f = SecureTemporaryFile('/tmp') + f.write(MESSAGE) + f.read() + assert os.path.exists(f.filepath) + f.close() + assert not os.path.exists(f.filepath) - def test_SecureTemporaryFile_is_a_STREAMLIKE_TYPE(self): - self.assertTrue(_is_stream( - secure_tempfile.SecureTemporaryFile('/tmp'))) - def test_buffered_read(self): - msg = self.msg * 1000 - self.f.write(msg) - str = '' - while True: - char = self.f.read(1024) - if char: - str += char - else: - break +def test_SecureTemporaryFile_is_a_STREAMLIKE_TYPE(): + assert _is_stream(SecureTemporaryFile('/tmp')) - 
self.assertEqual(str, msg) - def test_tmp_file_id_omits_invalid_chars(self): - """The `SecureTempFile.tmp_file_id` instance attribute is used as the filename - for the secure temporary file. This attribute should not contain - invalid characters such as '/' and '\0' (null).""" - self.assertNotIn('/', self.f.tmp_file_id) - self.assertNotIn('\0', self.f.tmp_file_id) +def test_buffered_read(): + f = SecureTemporaryFile('/tmp') + msg = MESSAGE * 1000 + f.write(msg) + out = b'' + while True: + chars = f.read(1024) + if chars: + out += chars + else: + break + + assert out.decode('utf-8') == msg + + +def test_tmp_file_id_omits_invalid_chars(): + """The `SecureTempFile.tmp_file_id` instance attribute is used as the filename + for the secure temporary file. This attribute should not contain + invalid characters such as '/' and '\0' (null).""" + f = SecureTemporaryFile('/tmp') + assert '/' not in f.tmp_file_id + assert '\0' not in f.tmp_file_id diff --git a/securedrop/tests/test_securedrop-admin.py b/securedrop/tests/test_securedrop-admin.py deleted file mode 100644 --- a/securedrop/tests/test_securedrop-admin.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SecureDrop whistleblower submission system -# Copyright (C) 2017 Loic Dachary <[email protected]> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. 
-# - -import argparse -import imp -from os.path import abspath, dirname, join, realpath -import pytest -import subprocess - -here = abspath(join(dirname(realpath(__file__)))) -securedrop_admin = imp.load_source('sa', here + '/../securedrop-admin') - - -class TestSecureDropAdmin(object): - - def test_verbose(self, capsys): - securedrop_admin.setup_logger(verbose=True) - securedrop_admin.sdlog.debug('VISIBLE') - out, err = capsys.readouterr() - assert 'VISIBLE' in out - - def test_not_verbose(self, capsys): - securedrop_admin.setup_logger(verbose=False) - securedrop_admin.sdlog.debug('HIDDEN') - securedrop_admin.sdlog.info('VISIBLE') - out, err = capsys.readouterr() - assert 'HIDDEN' not in out - assert 'VISIBLE' in out - - def test_run_command(self): - for output_line in securedrop_admin.run_command( - ['/bin/echo', 'something']): - assert output_line.strip() == 'something' - - lines = [] - with pytest.raises(subprocess.CalledProcessError): - for output_line in securedrop_admin.run_command( - ['sh', '-c', - 'echo in stdout ; echo in stderr >&2 ; false']): - lines.append(output_line.strip()) - assert lines[0] == 'in stdout' - assert lines[1] == 'in stderr' - - def test_install_pip_dependencies_up_to_date(self, caplog): - args = argparse.Namespace() - securedrop_admin.install_pip_dependencies(args, ['/bin/echo']) - assert 'securedrop-admin are up-to-date' in caplog.text - - def test_install_pip_dependencies_upgraded(self, caplog): - args = argparse.Namespace() - securedrop_admin.install_pip_dependencies( - args, ['/bin/echo', 'Successfully installed']) - assert 'securedrop-admin upgraded' in caplog.text - - def test_install_pip_dependencies_fail(self, caplog): - args = argparse.Namespace() - with pytest.raises(SystemExit): - securedrop_admin.install_pip_dependencies( - args, ['/bin/sh', '-c', - 'echo in stdout ; echo in stderr >&2 ; false']) - assert 'Failed to install' in caplog.text - assert 'in stdout' in caplog.text - assert 'in stderr' in caplog.text diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -1,534 +1,756 @@ # -*- coding: utf-8 -*- -from cStringIO import StringIO import gzip -from mock import patch, ANY import re +import subprocess +import time -from flask import session, escape, url_for -from flask_testing import TestCase +from io import BytesIO, StringIO +from flask import session, escape, current_app, url_for, g +from mock import patch, ANY import crypto_util -from db import db_session, Source import source +from . 
import utils
 import version
 
-import utils
-import json
-import config
-from utils.db_helper import new_codename
+
+from db import db
+from models import InstanceConfig, Source, Reply
+from source_app import main as source_app_main
+from source_app import api as source_app_api
+from .utils.db_helper import new_codename
+from .utils.instrument import InstrumentedApp
 
 overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
 
 
-class TestSourceApp(TestCase):
+def test_page_not_found(source_app):
+    """Verify the page not found condition returns the intended template"""
+    with InstrumentedApp(source_app) as ins:
+        with source_app.test_client() as app:
+            resp = app.get('UNKNOWN')
+            assert resp.status_code == 404
+            ins.assert_template_used('notfound.html')
 
-    def create_app(self):
-        return source.app
 
-    def setUp(self):
-        utils.env.setup()
+def test_index(source_app):
+    """Test that the landing page loads and looks how we expect"""
+    with source_app.test_client() as app:
+        resp = app.get(url_for('main.index'))
+        assert resp.status_code == 200
+        text = resp.data.decode('utf-8')
+        assert 'First submission' in text
+        assert 'Return visit' in text
 
-    def tearDown(self):
-        utils.env.teardown()
 
-    def test_page_not_found(self):
-        """Verify the page not found condition returns the intended template"""
-        response = self.client.get('/UNKNOWN')
-        self.assert404(response)
-        self.assertTemplateUsed('notfound.html')
+def test_all_words_in_wordlist_validate(source_app):
+    """Verify that all words in the wordlist are allowed by the form
+    validation. Otherwise a source will have a codename and be unable to
+    return."""
 
-    def test_index(self):
-        """Test that the landing page loads and looks how we expect"""
-        response = self.client.get('/')
-        self.assertEqual(response.status_code, 200)
-        self.assertIn("Submit documents for the first time", response.data)
-        self.assertIn("Already submitted something?", response.data)
+    with source_app.app_context():
+        wordlist_en = current_app.crypto_util.get_wordlist('en')
 
-    def test_all_words_in_wordlist_validate(self):
-        """Verify that all words in the wordlist are allowed by the form
-        validation. Otherwise a source will have a codename and be unable to
-        return."""
+        # chunk the words to cut down on the number of requests we make
+        # otherwise this test is *slow*
+        chunks = [wordlist_en[i:i + 7] for i in range(0, len(wordlist_en), 7)]
 
-        wordlist_en = crypto_util._get_wordlist('en')
+        with source_app.test_client() as app:
+            for words in chunks:
+                resp = app.post(url_for('main.login'),
+                                data=dict(codename=' '.join(words)),
+                                follow_redirects=True)
+                assert resp.status_code == 200
+                text = resp.data.decode('utf-8')
+                # If the word does not validate, then it will show
+                # 'Invalid input'. If it does validate, it should show that
+                # it isn't a recognized codename.
+                assert 'Sorry, that is not a recognized codename.' in text
+                assert 'logged_in' not in session
+
+
+def _find_codename(html):
+    """Find a source codename (diceware passphrase) in HTML"""
+    # Codenames may contain HTML escape characters, and the wordlist
+    # contains various symbols. 
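+    # An illustrative target for the pattern below (an assumption about the
+    # rendered markup, not output captured from the app):
+    #   <p id="codename" class="...">brave tundra mosaic ...</p>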
+ codename_re = (r'<p [^>]*id="codename"[^>]*>' + r'(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>') + codename_match = re.search(codename_re, html) + assert codename_match is not None + return codename_match.group('codename') + + +def test_generate(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + session_codename = session['codename'] + + text = resp.data.decode('utf-8') + assert "This codename is what you will use in future visits" in text + + codename = _find_codename(resp.data.decode('utf-8')) + assert len(codename.split()) == Source.NUM_WORDS + # codename is also stored in the session - make sure it matches the + # codename displayed to the source + assert codename == escape(session_codename) + + +def test_generate_already_logged_in(source_app): + with source_app.test_client() as app: + new_codename(app, session) + # Make sure it redirects to /lookup when logged in + resp = app.get(url_for('main.generate')) + assert resp.status_code == 302 + # Make sure it flashes the message on the lookup page + resp = app.get(url_for('main.generate'), follow_redirects=True) + # Should redirect to /lookup + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "because you are already logged in." in text + + +def test_create_new_source(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + resp = app.post(url_for('main.create'), follow_redirects=True) + assert session['logged_in'] is True + # should be redirected to /lookup + text = resp.data.decode('utf-8') + assert "Submit Files" in text + + +def test_generate_too_long_codename(source_app): + """Generate a codename that exceeds the maximum codename length""" + + with patch.object(source_app.logger, 'warning') as logger: + with patch.object(crypto_util.CryptoUtil, 'genrandomid', + side_effect=[overly_long_codename, + 'short codename']): + with source_app.test_client() as app: + resp = app.post(url_for('main.generate')) + assert resp.status_code == 200 - # chunk the words to cut down on the number of requets we make - # otherwise this test is *slow* - chunks = [wordlist_en[i:i + 7] for i in range(0, len(wordlist_en), 7)] + logger.assert_called_with( + "Generated a source codename that was too long, " + "skipping it. This should not happen. " + "(Codename='{}')".format(overly_long_codename) + ) - for words in chunks: - with self.client as c: - resp = c.post('/login', data=dict(codename=' '.join(words)), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - # If the word does not validate, then it will show - # 'Invalid input'. If it does validate, it should show that - # it isn't a recognized codename. - self.assertIn('Sorry, that is not a recognized codename.', - resp.data) - self.assertNotIn('logged_in', session) - - def _find_codename(self, html): - """Find a source codename (diceware passphrase) in HTML""" - # Codenames may contain HTML escape characters, and the wordlist - # contains various symbols. 
- codename_re = (r'<p [^>]*id="codename"[^>]*>' - r'(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>') - codename_match = re.search(codename_re, html) - self.assertIsNotNone(codename_match) - return codename_match.group('codename') - - def test_generate(self): - with self.client as c: - resp = c.get('/generate') - self.assertEqual(resp.status_code, 200) - session_codename = session['codename'] - self.assertIn("This codename is what you will use in future visits", - resp.data) - codename = self._find_codename(resp.data) - self.assertEqual(len(codename.split()), Source.NUM_WORDS) - # codename is also stored in the session - make sure it matches the - # codename displayed to the source - self.assertEqual(codename, escape(session_codename)) - - def test_generate_already_logged_in(self): - with self.client as client: - new_codename(client, session) - # Make sure it redirects to /lookup when logged in - resp = client.get('/generate') - self.assertEqual(resp.status_code, 302) - # Make sure it flashes the message on the lookup page - resp = client.get('/generate', follow_redirects=True) - # Should redirect to /lookup - self.assertEqual(resp.status_code, 200) - self.assertIn("because you are already logged in.", resp.data) - - def test_create_new_source(self): - with self.client as c: - resp = c.get('/generate') - resp = c.post('/create', follow_redirects=True) - self.assertTrue(session['logged_in']) - # should be redirected to /lookup - self.assertIn("Submit Materials", resp.data) - - @patch('source.app.logger.warning') - @patch('crypto_util.genrandomid', - side_effect=[overly_long_codename, 'short codename']) - def test_generate_too_long_codename(self, genrandomid, logger): - """Generate a codename that exceeds the maximum codename length""" - - with self.client as c: - resp = c.post('/generate') - self.assertEqual(resp.status_code, 200) - - logger.assert_called_with( - "Generated a source codename that was too long, " - "skipping it. This should not happen. 
" - "(Codename='{}')".format(overly_long_codename) - ) - - @patch('source.app.logger.error') - def test_create_duplicate_codename(self, logger): - with self.client as c: - c.get('/generate') + +def test_create_duplicate_codename_logged_in_not_in_session(source_app): + with patch.object(source.app.logger, 'error') as logger: + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + + # Create a source the first time + resp = app.post(url_for('main.create'), follow_redirects=True) + assert resp.status_code == 200 + codename = session['codename'] + + with source_app.test_client() as app: + # Attempt to add the same source + with app.session_transaction() as sess: + sess['codename'] = codename + resp = app.post(url_for('main.create'), follow_redirects=True) + logger.assert_called_once() + assert ("Attempt to create a source with duplicate codename" + in logger.call_args[0][0]) + assert resp.status_code == 500 + assert 'codename' not in session + assert 'logged_in' not in session + + +def test_create_duplicate_codename_logged_in_in_session(source_app): + with patch.object(source.app.logger, 'error') as logger: + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 # Create a source the first time - c.post('/create', follow_redirects=True) + resp = app.post(url_for('main.create'), follow_redirects=True) + assert resp.status_code == 200 # Attempt to add the same source - c.post('/create', follow_redirects=True) + resp = app.post(url_for('main.create'), follow_redirects=True) logger.assert_called_once() - self.assertIn("Attempt to create a source with duplicate codename", - logger.call_args[0][0]) + assert ("Attempt to create a source with duplicate codename" + in logger.call_args[0][0]) + assert resp.status_code == 500 assert 'codename' not in session - def test_lookup(self): - """Test various elements on the /lookup page.""" - with self.client as client: - codename = new_codename(client, session) - resp = client.post('login', data=dict(codename=codename), - follow_redirects=True) - # redirects to /lookup - self.assertIn("public key", resp.data) - # download the public key - resp = client.get('journalist-key') - self.assertIn("BEGIN PGP PUBLIC KEY BLOCK", resp.data) - - def test_login_and_logout(self): - resp = self.client.get('/login') - self.assertEqual(resp.status_code, 200) - self.assertIn("Enter Codename", resp.data) - - with self.client as client: - codename = new_codename(client, session) - resp = client.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Submit Materials", resp.data) - self.assertTrue(session['logged_in']) - resp = client.get('/logout', follow_redirects=True) - - with self.client as c: - resp = c.post('/login', data=dict(codename='invalid'), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn('Sorry, that is not a recognized codename.', - resp.data) - self.assertNotIn('logged_in', session) - - with self.client as c: - resp = c.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertTrue(session['logged_in']) - resp = c.get('/logout', follow_redirects=True) - - # sessions always have 'expires', so pop it for the next check - session.pop('expires', None) - - self.assertNotIn('logged_in', session) - self.assertNotIn('codename', session) - - self.assertIn('Thank you for exiting your 
session!', resp.data) - - def test_user_must_log_in_for_protected_views(self): - with self.client as c: - resp = c.get('/lookup', follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Enter Codename", resp.data) - - def test_login_with_whitespace(self): - """ - Test that codenames with leading or trailing whitespace still work""" - - with self.client as client: - def login_test(codename): - resp = client.get('/login') - self.assertEqual(resp.status_code, 200) - self.assertIn("Enter Codename", resp.data) - - resp = client.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Submit Materials", resp.data) - self.assertTrue(session['logged_in']) - resp = client.get('/logout', follow_redirects=True) - - codename = new_codename(client, session) - login_test(codename + ' ') - login_test(' ' + codename + ' ') - login_test(' ' + codename) - - def _dummy_submission(self, client): - """ - Helper to make a submission (content unimportant), mostly useful in - testing notification behavior for a source's first vs. their - subsequent submissions - """ - return client.post('/submit', data=dict( - msg="Pay no attention to the man behind the curtain.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - - def test_initial_submission_notification(self): - """ - Regardless of the type of submission (message, file, or both), the - first submission is always greeted with a notification - reminding sources to check back later for replies. - """ - with self.client as client: - new_codename(client, session) - resp = self._dummy_submission(client) - self.assertEqual(resp.status_code, 200) - self.assertIn( - "Thank you for sending this information to us.", - resp.data) - - def test_submit_message(self): - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Thanks! We received your message", resp.data) - - def test_submit_empty_message(self): - with self.client as client: - new_codename(client, session) - resp = client.post('/submit', data=dict( - msg="", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertIn("You must enter a message or choose a file to " - "submit.", - resp.data) - - def test_submit_big_message(self): - ''' - When the message is larger than 512KB it's written to disk instead of - just residing in memory. Make sure the different return type of - SecureTemporaryFile is handled as well as BytesIO. - ''' - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="AA" * (1024 * 512), - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Thanks! We received your message", resp.data) - - def test_submit_file(self): - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="", - fh=(StringIO('This is a test'), 'test.txt'), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn('Thanks! 
We received your document', resp.data) - - def test_submit_both(self): - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( + # Reproducer for bug #4361 + resp = app.post(url_for('main.index'), follow_redirects=True) + assert 'logged_in' not in session + + +def test_lookup(source_app): + """Test various elements on the /lookup page.""" + with source_app.test_client() as app: + codename = new_codename(app, session) + resp = app.post(url_for('main.login'), data=dict(codename=codename), + follow_redirects=True) + # redirects to /lookup + text = resp.data.decode('utf-8') + assert "public key" in text + # download the public key + resp = app.get(url_for('info.download_journalist_pubkey')) + text = resp.data.decode('utf-8') + assert "BEGIN PGP PUBLIC KEY BLOCK" in text + + +def test_login_and_logout(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('main.login')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Enter Codename" in text + + codename = new_codename(app, session) + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Submit Files" in text + assert session['logged_in'] is True + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(codename='invalid'), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert 'Sorry, that is not a recognized codename.' in text + assert 'logged_in' not in session + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + assert session['logged_in'] is True + + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + assert session['logged_in'] is True + + resp = app.get(url_for('main.logout'), + follow_redirects=True) + assert 'logged_in' not in session + assert 'codename' not in session + text = resp.data.decode('utf-8') + assert 'Thank you for exiting your session!' in text + + +def test_user_must_log_in_for_protected_views(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('main.lookup'), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Enter Codename" in text + + +def test_login_with_whitespace(source_app): + """ + Test that codenames with leading or trailing whitespace still work""" + + def login_test(app, codename): + resp = app.get(url_for('main.login')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Enter Codename" in text + + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Submit Files" in text + assert session['logged_in'] is True + + with source_app.test_client() as app: + codename = new_codename(app, session) + + codenames = [ + codename + ' ', + ' ' + codename + ' ', + ' ' + codename, + ] + + for codename_ in codenames: + with source_app.test_client() as app: + login_test(app, codename_) + + +def _dummy_submission(app): + """ + Helper to make a submission (content unimportant), mostly useful in + testing notification behavior for a source's first vs. 
their + subsequent submissions + """ + return app.post( + url_for('main.submit'), + data=dict(msg="Pay no attention to the man behind the curtain.", + fh=(BytesIO(b''), '')), + follow_redirects=True) + + +def test_initial_submission_notification(source_app): + """ + Regardless of the type of submission (message, file, or both), the + first submission is always greeted with a notification + reminding sources to check back later for replies. + """ + with source_app.test_client() as app: + new_codename(app, session) + resp = _dummy_submission(app) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Thank you for sending this information to us." in text + + +def test_submit_message(source_app): + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Thanks! We received your message" in text + + +def test_submit_empty_message(source_app): + with source_app.test_client() as app: + new_codename(app, session) + resp = app.post( + url_for('main.submit'), + data=dict(msg="", fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "You must enter a message or choose a file to submit." \ + in text + + +def test_submit_big_message(source_app): + ''' + When the message is larger than 512KB it's written to disk instead of + just residing in memory. Make sure the different return type of + SecureTemporaryFile is handled as well as BytesIO. + ''' + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict(msg="AA" * (1024 * 512), fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Thanks! We received your message" in text + + +def test_submit_file(source_app): + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict(msg="", fh=(BytesIO(b'This is a test'), 'test.txt')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert 'Thanks! We received your document' in text + + +def test_submit_both(source_app): + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict( msg="This is a test", - fh=(StringIO('This is a test'), 'test.txt'), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Thanks! 
We received your message and document", - resp.data) - - @patch('source_app.main.async_genkey') - @patch('source_app.main.get_entropy_estimate') - def test_submit_message_with_low_entropy(self, get_entropy_estimate, - async_genkey): - get_entropy_estimate.return_value = 300 - - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertFalse(async_genkey.called) - - @patch('source_app.main.async_genkey') - @patch('source_app.main.get_entropy_estimate') - def test_submit_message_with_enough_entropy(self, get_entropy_estimate, - async_genkey): - get_entropy_estimate.return_value = 2400 - - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertTrue(async_genkey.called) - - def test_delete_all_successfully_deletes_replies(self): + fh=(BytesIO(b'This is a test'), 'test.txt')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Thanks! We received your message and document" in text + + +def test_submit_message_with_low_entropy(source_app): + with patch.object(source_app_main, 'async_genkey') as async_genkey: + with patch.object(source_app_main, 'get_entropy_estimate') \ + as get_entropy_estimate: + get_entropy_estimate.return_value = 300 + + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + assert not async_genkey.called + + +def test_submit_message_with_enough_entropy(source_app): + with patch.object(source_app_main, 'async_genkey') as async_genkey: + with patch.object(source_app_main, 'get_entropy_estimate') \ + as get_entropy_estimate: + get_entropy_estimate.return_value = 2400 + + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict(msg="This is a test.", fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + assert async_genkey.called + + +def test_delete_all_successfully_deletes_replies(source_app): + with source_app.app_context(): journalist, _ = utils.db_helper.init_journalist() source, codename = utils.db_helper.init_source() + source_id = source.id utils.db_helper.reply(journalist, source, 1) - with self.client as c: - resp = c.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - resp = c.post('/delete-all', follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("All replies have been deleted", resp.data) - - @patch('source.app.logger.error') - def test_delete_all_replies_already_deleted(self, logger): + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.post(url_for('main.batch_delete'), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "All replies have been deleted" in text + + with source_app.app_context(): + source = 
Source.query.get(source_id) + replies = Reply.query.filter(Reply.source_id == source_id).all() + for reply in replies: + assert reply.deleted_by_source is True + + +def test_delete_all_replies_deleted_by_source_but_not_journalist(source_app): + """Replies can be deleted by a source, but not by journalists. As such, + replies may still exist in the replies table, but no longer be visible.""" + with source_app.app_context(): journalist, _ = utils.db_helper.init_journalist() source, codename = utils.db_helper.init_source() - # Note that we are creating the source and no replies - - with self.client as c: - resp = c.post('/login', data=dict(codename=codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - resp = c.post('/delete-all', follow_redirects=True) - self.assertEqual(resp.status_code, 200) + utils.db_helper.reply(journalist, source, 1) + replies = Reply.query.filter(Reply.source_id == source.id).all() + for reply in replies: + reply.deleted_by_source = True + db.session.add(reply) + db.session.commit() + + with source_app.test_client() as app: + with patch.object(source_app.logger, 'error') as logger: + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.post(url_for('main.batch_delete'), + follow_redirects=True) + assert resp.status_code == 200 logger.assert_called_once_with( "Found no replies when at least one was expected" ) - @patch('gzip.GzipFile', wraps=gzip.GzipFile) - def test_submit_sanitizes_filename(self, gzipfile): - """Test that upload file name is sanitized""" - insecure_filename = '../../bin/gpg' - sanitized_filename = 'bin_gpg' - - with self.client as client: - new_codename(client, session) - client.post('/submit', data=dict( - msg="", - fh=(StringIO('This is a test'), insecure_filename), - ), follow_redirects=True) - gzipfile.assert_called_with(filename=sanitized_filename, - mode=ANY, - fileobj=ANY) - - def test_tor2web_warning_headers(self): - resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')]) - self.assertEqual(resp.status_code, 200) - self.assertIn("You appear to be using Tor2Web.", resp.data) - - def test_tor2web_warning(self): - resp = self.client.get('/tor2web-warning') - self.assertEqual(resp.status_code, 200) - self.assertIn("Why is there a warning about Tor2Web?", resp.data) - - def test_why_use_tor_browser(self): - resp = self.client.get('/use-tor') - self.assertEqual(resp.status_code, 200) - self.assertIn("You Should Use Tor Browser", resp.data) - - def test_why_journalist_key(self): - resp = self.client.get('/why-journalist-key') - self.assertEqual(resp.status_code, 200) - self.assertIn("Why download the journalist's public key?", resp.data) - - def test_metadata_route(self): - resp = self.client.get('/metadata') - self.assertEqual(resp.status_code, 200) - self.assertEqual(resp.headers.get('Content-Type'), 'application/json') - self.assertEqual(json.loads(resp.data.decode('utf-8')).get( - 'sd_version'), version.__version__) - - @patch('crypto_util.hash_codename') - def test_login_with_overly_long_codename(self, mock_hash_codename): - """Attempting to login with an overly long codename should result in - an error, and scrypt should not be called to avoid DoS.""" - with self.client as c: - resp = c.post('/login', data=dict(codename=overly_long_codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Field must be between 1 and {} " - "characters long.".format(Source.MAX_CODENAME_LEN), - 
resp.data) - self.assertFalse(mock_hash_codename.called, - "Called hash_codename for codename w/ invalid " - "length") - - @patch('source.app.logger.warning') - @patch('subprocess.call', return_value=1) - def test_failed_normalize_timestamps_logs_warning(self, call, logger): - """If a normalize timestamps event fails, the subprocess that calls - touch will fail and exit 1. When this happens, the submission should - still occur, but a warning should be logged (this will trigger an - OSSEC alert).""" - - with self.client as client: - new_codename(client, session) - self._dummy_submission(client) - resp = client.post('/submit', data=dict( - msg="This is a test.", - fh=(StringIO(''), ''), - ), follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Thanks! We received your message", resp.data) +def test_delete_all_replies_already_deleted_by_journalists(source_app): + with source_app.app_context(): + journalist, _ = utils.db_helper.init_journalist() + source, codename = utils.db_helper.init_source() + # Note that we are creating the source and no replies + + with source_app.test_client() as app: + with patch.object(source_app.logger, 'error') as logger: + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.post(url_for('main.batch_delete'), + follow_redirects=True) + assert resp.status_code == 200 logger.assert_called_once_with( - "Couldn't normalize submission " - "timestamps (touch exited with 1)" + "Found no replies when at least one was expected" ) - @patch('source.app.logger.error') - def test_source_is_deleted_while_logged_in(self, logger): - """If a source is deleted by a journalist when they are logged in, - a NoResultFound will occur. The source should be redirected to the - index when this happens, and a warning logged.""" - with self.client as client: - codename = new_codename(client, session) - resp = client.post('login', data=dict(codename=codename), - follow_redirects=True) +def test_submit_sanitizes_filename(source_app): + """Test that upload file name is sanitized""" + insecure_filename = '../../bin/gpg' + sanitized_filename = 'bin_gpg' + + with patch.object(gzip, 'GzipFile', wraps=gzip.GzipFile) as gzipfile: + with source_app.test_client() as app: + new_codename(app, session) + resp = app.post( + url_for('main.submit'), + data=dict( + msg="", + fh=(BytesIO(b'This is a test'), insecure_filename)), + follow_redirects=True) + assert resp.status_code == 200 + gzipfile.assert_called_with(filename=sanitized_filename, + mode=ANY, + fileobj=ANY, + mtime=0) + + +def test_tor2web_warning_headers(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('main.index'), + headers=[('X-tor2web', 'encrypted')]) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "You appear to be using Tor2Web." in text + + +def test_tor2web_warning(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('info.tor2web_warning')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Why is there a warning about Tor2Web?" 
in text + + +def test_why_use_tor_browser(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('info.recommend_tor_browser')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "You Should Use Tor Browser" in text + + +def test_why_journalist_key(source_app): + with source_app.test_client() as app: + resp = app.get(url_for('info.why_download_journalist_pubkey')) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Why download the journalist's public key?" in text + + +def test_metadata_route(config, source_app): + with patch.object(source_app_api.platform, "linux_distribution") as mocked_platform: + mocked_platform.return_value = ("Ubuntu", "16.04", "xenial") + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert resp.json.get('allow_document_uploads') ==\ + InstanceConfig.get_current().allow_document_uploads + assert resp.json.get('sd_version') == version.__version__ + assert resp.json.get('server_os') == '16.04' + assert resp.json.get('supported_languages') ==\ + config.SUPPORTED_LOCALES + + +def test_login_with_overly_long_codename(source_app): + """Attempting to login with an overly long codename should result in + an error, and scrypt should not be called to avoid DoS.""" + with patch.object(crypto_util.CryptoUtil, 'hash_codename') \ + as mock_hash_codename: + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(codename=overly_long_codename), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert ("Field must be between 1 and {} characters long." + .format(Source.MAX_CODENAME_LEN)) in text + assert not mock_hash_codename.called, \ + "Called hash_codename for codename w/ invalid length" + + +def test_failed_normalize_timestamps_logs_warning(source_app): + """If a normalize timestamps event fails, the subprocess that calls + touch will fail and exit 1. When this happens, the submission should + still occur, but a warning should be logged (this will trigger an + OSSEC alert).""" + + with patch("source_app.main.get_entropy_estimate", return_value=8192): + with patch.object(source_app.logger, 'warning') as logger: + with patch.object(subprocess, 'call', return_value=1): + with source_app.test_client() as app: + new_codename(app, session) + _dummy_submission(app) + resp = app.post( + url_for('main.submit'), + data=dict( + msg="This is a test.", + fh=(StringIO(''), '')), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Thanks! We received your message" in text + + logger.assert_called_once_with( + "Couldn't normalize submission " + "timestamps (touch exited with 1)" + ) + + +def test_source_is_deleted_while_logged_in(source_app): + """If a source is deleted by a journalist when they are logged in, + a NoResultFound will occur. 
The source should be redirected to the + index when this happens, and a warning logged.""" + + with patch.object(source_app.logger, 'error') as logger: + with source_app.test_client() as app: + codename = new_codename(app, session) + resp = app.post('login', data=dict(codename=codename), + follow_redirects=True) # Now the journalist deletes the source - filesystem_id = crypto_util.hash_codename(codename) - crypto_util.delete_reply_keypair(filesystem_id) - source = Source.query.filter_by(filesystem_id=filesystem_id).one() - db_session.delete(source) - db_session.commit() + filesystem_id = source_app.crypto_util.hash_codename(codename) + source_app.crypto_util.delete_reply_keypair(filesystem_id) + source = Source.query.filter_by( + filesystem_id=filesystem_id).one() + db.session.delete(source) + db.session.commit() # Source attempts to continue to navigate - resp = client.post('/lookup', follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn('Submit documents for the first time', resp.data) - self.assertNotIn('logged_in', session.keys()) - self.assertNotIn('codename', session.keys()) + resp = app.post(url_for('main.lookup'), follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert 'First submission' in text + assert 'logged_in' not in session + assert 'codename' not in session logger.assert_called_once_with( "Found no Sources when one was expected: " "No row was found for one()") - def test_login_with_invalid_codename(self): - """Logging in with a codename with invalid characters should return - an informative message to the user.""" - invalid_codename = '[]' +def test_login_with_invalid_codename(source_app): + """Logging in with a codename with invalid characters should return + an informative message to the user.""" - with self.client as c: - resp = c.post('/login', data=dict(codename=invalid_codename), - follow_redirects=True) - self.assertEqual(resp.status_code, 200) - self.assertIn("Invalid input.", resp.data) + invalid_codename = '[]' + + with source_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(codename=invalid_codename), + follow_redirects=True) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Invalid input." 
in text - def _test_source_session_expiration(self): - try: - old_expiration = config.SESSION_EXPIRATION_MINUTES - has_session_expiration = True - except AttributeError: - has_session_expiration = False - try: - with self.client as client: - codename = new_codename(client, session) +def test_source_session_expiration(config, source_app): + with source_app.test_client() as app: + codename = new_codename(app, session) - # set the expiration to ensure we trigger an expiration - config.SESSION_EXPIRATION_MINUTES = -1 + # set the expiration to ensure we trigger an expiration + config.SESSION_EXPIRATION_MINUTES = -1 - resp = client.post('/login', - data=dict(codename=codename), - follow_redirects=True) - assert resp.status_code == 200 - resp = client.get('/lookup', follow_redirects=True) - - # check that the session was cleared (apart from 'expires' - # which is always present and 'csrf_token' which leaks no info) - session.pop('expires', None) - session.pop('csrf_token', None) - assert not session, session - assert ('You have been logged out due to inactivity' in - resp.data.decode('utf-8')) - finally: - if has_session_expiration: - config.SESSION_EXPIRATION_MINUTES = old_expiration - else: - del config.SESSION_EXPIRATION_MINUTES - - def test_csrf_error_page(self): - old_enabled = self.app.config['WTF_CSRF_ENABLED'] - self.app.config['WTF_CSRF_ENABLED'] = True - - try: - with self.app.test_client() as app: - resp = app.post(url_for('main.create')) - self.assertRedirects(resp, url_for('main.index')) - - resp = app.post(url_for('main.create'), follow_redirects=True) - self.assertIn('Your session timed out due to inactivity', - resp.data) - finally: - self.app.config['WTF_CSRF_ENABLED'] = old_enabled + resp = app.post(url_for('main.login'), + data=dict(codename=codename), + follow_redirects=True) + assert resp.status_code == 200 + resp = app.get(url_for('main.lookup'), follow_redirects=True) + + # check that the session was cleared (apart from 'expires' + # which is always present and 'csrf_token' which leaks no info) + session.pop('expires', None) + session.pop('csrf_token', None) + assert not session + + text = resp.data.decode('utf-8') + assert 'Your session timed out due to inactivity' in text + + +def test_source_session_expiration_create(config, source_app): + with source_app.test_client() as app: + + seconds_session_expire = 1 + config.SESSION_EXPIRATION_MINUTES = seconds_session_expire / 60. + + # Make codename, and then wait for session to expire. + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + + time.sleep(seconds_session_expire + 0.1) + + # Now when we click create, the session will have expired. 
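+        # A POST against an expired session should behave like any other
+        # timed-out request: the session is cleared and the inactivity
+        # message is flashed.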
+        resp = app.post(url_for('main.create'), follow_redirects=True)
+
+        # check that the session was cleared (apart from 'expires'
+        # which is always present and 'csrf_token' which leaks no info)
+        session.pop('expires', None)
+        session.pop('csrf_token', None)
+        assert not session
+
+        text = resp.data.decode('utf-8')
+        assert 'Your session timed out due to inactivity' in text
+
+
+def test_csrf_error_page(config, source_app):
+    source_app.config['WTF_CSRF_ENABLED'] = True
+    with source_app.test_client() as app:
+        with InstrumentedApp(source_app) as ins:
+            resp = app.post(url_for('main.create'))
+            ins.assert_redirects(resp, url_for('main.index'))
+
+        resp = app.post(url_for('main.create'), follow_redirects=True)
+        text = resp.data.decode('utf-8')
+        assert 'Your session timed out due to inactivity' in text
+
+
+def test_source_can_only_delete_own_replies(source_app):
+    '''This test checks for a bug where an authenticated source A could
+    delete replies sent to source B by "guessing" the filename.
+    '''
+    source0, codename0 = utils.db_helper.init_source()
+    source1, codename1 = utils.db_helper.init_source()
+    journalist, _ = utils.db_helper.init_journalist()
+    replies = utils.db_helper.reply(journalist, source0, 1)
+    filename = replies[0].filename
+    confirmation_msg = 'Reply deleted'
+
+    with source_app.test_client() as app:
+        resp = app.post(url_for('main.login'),
+                        data={'codename': codename1},
+                        follow_redirects=True)
+        assert resp.status_code == 200
+        assert g.source.id == source1.id
+
+        resp = app.post(url_for('main.delete'),
+                        data={'reply_filename': filename},
+                        follow_redirects=True)
+        assert resp.status_code == 404
+        assert confirmation_msg not in resp.data.decode('utf-8')
+
+    reply = Reply.query.filter_by(filename=filename).one()
+    assert not reply.deleted_by_source
+
+    with source_app.test_client() as app:
+        resp = app.post(url_for('main.login'),
+                        data={'codename': codename0},
+                        follow_redirects=True)
+        assert resp.status_code == 200
+        assert g.source.id == source0.id
+
+        resp = app.post(url_for('main.delete'),
+                        data={'reply_filename': filename},
+                        follow_redirects=True)
+        assert resp.status_code == 200
+        assert confirmation_msg in resp.data.decode('utf-8')
+
+    reply = Reply.query.filter_by(filename=filename).one()
+    assert reply.deleted_by_source
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -1,145 +1,358 @@
 # -*- coding: utf-8 -*-
+import logging
 import os
-import shutil
-import unittest
+import io
+import pytest
+import re
+import stat
 import zipfile
 
 os.environ['SECUREDROP_ENV'] = 'test'  # noqa
-import config
-from db import db_session
+from . 
import utils + +from db import db +from journalist_app import create_app +from models import Submission, Reply import store -import utils +from store import Storage, queued_add_checksum_for_file, async_add_checksum_for_file + + +def create_file_in_source_dir(config, filesystem_id, filename): + """Helper function for simulating files""" + source_directory = os.path.join(config.STORE_DIR, + filesystem_id) + os.makedirs(source_directory) + + file_path = os.path.join(source_directory, filename) + with io.open(file_path, 'a'): + os.utime(file_path, None) + + return source_directory, file_path + + +def test_path_returns_filename_of_folder(journalist_app, config): + """`Storage.path` is called in this way in + journalist.delete_collection + """ + filesystem_id = 'example' + generated_absolute_path = journalist_app.storage.path(filesystem_id) + + expected_absolute_path = os.path.join(config.STORE_DIR, filesystem_id) + assert generated_absolute_path == expected_absolute_path + + +def test_path_returns_filename_of_items_within_folder(journalist_app, config): + """`Storage.path` is called in this way in journalist.bulk_delete""" + filesystem_id = 'example' + item_filename = '1-quintuple_cant-msg.gpg' + generated_absolute_path = journalist_app.storage.path(filesystem_id, + item_filename) + + expected_absolute_path = os.path.join(config.STORE_DIR, + filesystem_id, item_filename) + assert generated_absolute_path == expected_absolute_path + + +def test_path_without_filesystem_id(journalist_app, config): + filesystem_id = 'example' + item_filename = '1-quintuple_cant-msg.gpg' + + basedir = os.path.join(config.STORE_DIR, filesystem_id) + os.makedirs(basedir) + path_to_file = os.path.join(basedir, item_filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) -class TestStore(unittest.TestCase): + generated_absolute_path = \ + journalist_app.storage.path_without_filesystem_id(item_filename) - """The set of tests for store.py.""" + expected_absolute_path = os.path.join(config.STORE_DIR, + filesystem_id, item_filename) + assert generated_absolute_path == expected_absolute_path - def setUp(self): - utils.env.setup() - def tearDown(self): - utils.env.teardown() - db_session.remove() +def test_path_without_filesystem_id_duplicate_files(journalist_app, config): + filesystem_id = 'example' + filesystem_id_duplicate = 'example2' + item_filename = '1-quintuple_cant-msg.gpg' - def create_file_in_source_dir(self, filesystem_id, filename): - """Helper function for simulating files""" - source_directory = os.path.join(config.STORE_DIR, - filesystem_id) - os.makedirs(source_directory) + basedir = os.path.join(config.STORE_DIR, filesystem_id) + duplicate_basedir = os.path.join(config.STORE_DIR, filesystem_id_duplicate) - file_path = os.path.join(source_directory, filename) - with open(file_path, 'a'): - os.utime(file_path, None) + for directory in [basedir, duplicate_basedir]: + os.makedirs(directory) + path_to_file = os.path.join(directory, item_filename) + with open(path_to_file, 'a'): + os.utime(path_to_file, None) - return source_directory, file_path + with pytest.raises(store.TooManyFilesException): + journalist_app.storage.path_without_filesystem_id(item_filename) - def test_path_returns_filename_of_folder(self): - """store.path is called in this way in journalist.delete_collection""" - filesystem_id = 'example' - generated_absolute_path = store.path(filesystem_id) +def test_path_without_filesystem_id_no_file(journalist_app, config): + item_filename = 'not there' + with 
pytest.raises(store.NoFileFoundException): + journalist_app.storage.path_without_filesystem_id(item_filename) - expected_absolute_path = os.path.join(config.STORE_DIR, filesystem_id) - self.assertEquals(generated_absolute_path, expected_absolute_path) - def test_path_returns_filename_of_items_within_folder(self): - """store.path is called in this way in journalist.bulk_delete""" - filesystem_id = 'example' - item_filename = '1-quintuple_cant-msg.gpg' +def test_verify_path_not_absolute(journalist_app, config): + with pytest.raises(store.PathException): + journalist_app.storage.verify( + os.path.join(config.STORE_DIR, '..', 'etc', 'passwd')) - generated_absolute_path = store.path(filesystem_id, item_filename) - expected_absolute_path = os.path.join(config.STORE_DIR, - filesystem_id, item_filename) - self.assertEquals(generated_absolute_path, expected_absolute_path) +def test_verify_in_store_dir(journalist_app, config): + with pytest.raises(store.PathException) as e: + path = config.STORE_DIR + "_backup" + journalist_app.storage.verify(path) + assert e.message == "Path not valid in store: {}".format(path) - def test_verify_path_not_absolute(self): - with self.assertRaises(store.PathException): - store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd')) - def test_verify_in_store_dir(self): - with self.assertRaisesRegexp(store.PathException, 'Invalid directory'): - store.verify(config.STORE_DIR + "_backup") +def test_verify_store_path_not_absolute(journalist_app): + with pytest.raises(store.PathException) as e: + journalist_app.storage.verify('..') + assert e.message == "Path not valid in store: .." - def test_verify_store_dir_not_absolute(self): - STORE_DIR = config.STORE_DIR - try: - with self.assertRaisesRegexp( - store.PathException, - 'config.STORE_DIR\(\S*\) is not absolute'): - config.STORE_DIR = '.' - store.verify('something') - finally: - config.STORE_DIR = STORE_DIR - def test_verify_flagged_file_in_sourcedir_returns_true(self): - source_directory, file_path = self.create_file_in_source_dir( - 'example-filesystem-id', '_FLAG' - ) +def test_verify_rejects_symlinks(journalist_app): + """ + Test that verify rejects paths involving links outside the store. 
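+    The symlink is created inside the store but targets /foo outside of
+    it, so Storage.verify must refuse to resolve it.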
+ """ + try: + link = os.path.join(journalist_app.storage.storage_path, "foo") + os.symlink("/foo", link) + with pytest.raises(store.PathException) as e: + journalist_app.storage.verify(link) + assert e.message == "Path not valid in store: {}".format(link) + finally: + os.unlink(link) - self.assertTrue(store.verify(file_path)) - shutil.rmtree(source_directory) # Clean up created files +def test_verify_store_dir_not_absolute(): + with pytest.raises(store.PathException) as exc_info: + Storage('..', '/', '<not a gpg key>') - def test_verify_invalid_file_extension_in_sourcedir_raises_exception(self): - source_directory, file_path = self.create_file_in_source_dir( - 'example-filesystem-id', 'not_valid.txt' - ) + msg = str(exc_info.value) + assert re.compile('storage_path.*is not absolute').match(msg) - with self.assertRaisesRegexp( - store.PathException, - 'Invalid file extension .txt'): - store.verify(file_path) - shutil.rmtree(source_directory) # Clean up created files +def test_verify_store_temp_dir_not_absolute(): + with pytest.raises(store.PathException) as exc_info: + Storage('/', '..', '<not a gpg key>') - def test_verify_invalid_filename_in_sourcedir_raises_exception(self): - source_directory, file_path = self.create_file_in_source_dir( - 'example-filesystem-id', 'NOTVALID.gpg' - ) + msg = str(exc_info.value) + assert re.compile('temp_dir.*is not absolute').match(msg) - with self.assertRaisesRegexp( - store.PathException, - 'Invalid filename NOTVALID.gpg'): - store.verify(file_path) - shutil.rmtree(source_directory) # Clean up created files +def test_verify_regular_submission_in_sourcedir_returns_true(journalist_app, config): + """ + Tests that verify is happy with a regular submission file. - def test_get_zip(self): - source, _ = utils.db_helper.init_source() - submissions = utils.db_helper.submit(source, 2) + Verify should return True for a regular file that matches the + naming scheme of submissions. 
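+    (e.g. '1-regular-doc.gz.gpg': a counter, a journalist designation,
+    and a document/message suffix).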
+    """
+    source_directory, file_path = create_file_in_source_dir(
+        config, 'example-filesystem-id', '1-regular-doc.gz.gpg'
+    )
+
+    assert journalist_app.storage.verify(file_path)
+
+
+def test_verify_invalid_file_extension_in_sourcedir_raises_exception(
+        journalist_app, config):
+
+    source_directory, file_path = create_file_in_source_dir(
+        config, 'example-filesystem-id', 'not_valid.txt'
+    )
+
+    with pytest.raises(store.PathException) as e:
+        journalist_app.storage.verify(file_path)
+
+    assert 'Path not valid in store: {}'.format(file_path) in str(e)
+
+
+def test_verify_invalid_filename_in_sourcedir_raises_exception(
+        journalist_app, config):
+
+    source_directory, file_path = create_file_in_source_dir(
+        config, 'example-filesystem-id', 'NOTVALID.gpg'
+    )
+
+    with pytest.raises(store.PathException) as e:
+        journalist_app.storage.verify(file_path)
+    assert 'Path not valid in store: {}'.format(file_path) in str(e)
+
+
+def test_get_zip(journalist_app, test_source, config):
+    with journalist_app.app_context():
+        submissions = utils.db_helper.submit(
+            test_source['source'], 2)
         filenames = [os.path.join(config.STORE_DIR,
-                                  source.filesystem_id,
+                                  test_source['filesystem_id'],
                                   submission.filename)
                      for submission in submissions]

-        archive = zipfile.ZipFile(store.get_bulk_archive(submissions))
+        archive = zipfile.ZipFile(
+            journalist_app.storage.get_bulk_archive(submissions))
         archivefile_contents = archive.namelist()

-        for archived_file, actual_file in zip(archivefile_contents, filenames):
-            actual_file_content = open(actual_file).read()
-            zipped_file_content = archive.read(archived_file)
-            self.assertEquals(zipped_file_content, actual_file_content)
-
-    def test_rename_valid_submission(self):
-        source, _ = utils.db_helper.init_source()
-        old_journalist_filename = source.journalist_filename
-        old_filename = utils.db_helper.submit(source, 1)[0].filename
-        new_journalist_filename = 'nestor_makhno'
-        expected_filename = old_filename.replace(old_journalist_filename,
-                                                 new_journalist_filename)
-        actual_filename = store.rename_submission(
-            source.filesystem_id, old_filename,
-            new_journalist_filename)
-        self.assertEquals(actual_filename, expected_filename)
-
-    def test_rename_submission_with_invalid_filename(self):
-        original_filename = '1-quintuple_cant-msg.gpg'
-        returned_filename = store.rename_submission(
-            'example-filesystem-id', original_filename,
-            'this-new-filename-should-not-be-returned')
-
-        # None of the above files exist, so we expect the attempt to rename
-        # the submission to fail and the original filename to be returned.
-        self.assertEquals(original_filename, returned_filename)

+        for archived_file, actual_file in zip(archivefile_contents, filenames):
+            with io.open(actual_file, 'rb') as f:
+                actual_file_content = f.read()
+            zipped_file_content = archive.read(archived_file)
+            assert zipped_file_content == actual_file_content
+
+
[email protected]('db_model', [Submission, Reply])
+def test_add_checksum_for_file(config, db_model):
+    '''
+    Check that when we execute the `add_checksum_for_file` function, the database object is
+    correctly updated with the actual hash of the file.
+
+    We have to create our own app in order to have more control over the SQLAlchemy sessions. The
+    fixture pushes a single app context that forces us to work within a single transaction.
+    '''
+    app = create_app(config)
+
+    with app.app_context():
+        db.create_all()
+        source, _ = utils.db_helper.init_source_without_keypair()
+        target_file_path = app.storage.path(source.filesystem_id, '1-foo-msg.gpg')
+        test_message = b'hash me!'
+        expected_hash = 'f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927'
+
+        with open(target_file_path, 'wb') as f:
+            f.write(test_message)
+
+        if db_model == Submission:
+            db_obj = Submission(source, target_file_path)
+        else:
+            journalist, _ = utils.db_helper.init_journalist()
+            db_obj = Reply(journalist, source, target_file_path)
+
+        db.session.add(db_obj)
+        db.session.commit()
+        db_obj_id = db_obj.id
+
+    queued_add_checksum_for_file(db_model,
+                                 db_obj_id,
+                                 target_file_path,
+                                 app.config['SQLALCHEMY_DATABASE_URI'])
+
+    with app.app_context():
+        # requery to get a new object
+        db_obj = db_model.query.filter_by(id=db_obj_id).one()
+        assert db_obj.checksum == 'sha256:' + expected_hash
+
+
[email protected]('db_model', [Submission, Reply])
+def test_async_add_checksum_for_file(config, db_model):
+    '''
+    Check that when we queue the `async_add_checksum_for_file` job, the database object is
+    correctly updated with the actual hash of the file.
+
+    We have to create our own app in order to have more control over the SQLAlchemy sessions. The
+    fixture pushes a single app context that forces us to work within a single transaction.
+    '''
+    app = create_app(config)
+
+    with app.app_context():
+        db.create_all()
+        source, _ = utils.db_helper.init_source_without_keypair()
+        target_file_path = app.storage.path(source.filesystem_id, '1-foo-msg.gpg')
+        test_message = b'hash me!'
+        expected_hash = 'f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927'
+
+        with open(target_file_path, 'wb') as f:
+            f.write(test_message)
+
+        if db_model == Submission:
+            db_obj = Submission(source, target_file_path)
+        else:
+            journalist, _ = utils.db_helper.init_journalist()
+            db_obj = Reply(journalist, source, target_file_path)
+
+        db.session.add(db_obj)
+        db.session.commit()
+        db_obj_id = db_obj.id
+
+        job = async_add_checksum_for_file(db_obj)
+
+    utils.asynchronous.wait_for_redis_worker(job, timeout=5)
+
+    with app.app_context():
+        # requery to get a new object
+        db_obj = db_model.query.filter_by(id=db_obj_id).one()
+        assert db_obj.checksum == 'sha256:' + expected_hash
+
+
+def test_path_configuration_is_immutable(journalist_app):
+    """
+    Check that the store's paths cannot be changed.
+
+    They're exposed via properties that are supposed to be
+    read-only. It is of course possible to change them via the mangled
+    attribute names, but we want to confirm that accidental changes
+    are prevented.
+    """
+    with pytest.raises(AttributeError):
+        journalist_app.storage.storage_path = "/foo"
+
+    original_storage_path = journalist_app.storage.storage_path[:]
+    journalist_app.storage.__storage_path = "/foo"
+    assert journalist_app.storage.storage_path == original_storage_path
+
+    with pytest.raises(AttributeError):
+        journalist_app.storage.shredder_path = "/foo"
+
+    original_shredder_path = journalist_app.storage.shredder_path[:]
+    journalist_app.storage.__shredder_path = "/foo"
+    assert journalist_app.storage.shredder_path == original_shredder_path
+
+
+def test_shredder_configuration(journalist_app):
+    """
+    Ensure we're creating the shredder directory correctly.
+
+    We want to ensure that it's a sibling of the store directory, with
+    mode 0700.
+    """
+    store_path = journalist_app.storage.storage_path
+    shredder_path = journalist_app.storage.shredder_path
+    assert os.path.dirname(shredder_path) == os.path.dirname(store_path)
+    s = os.stat(shredder_path)
+    assert stat.S_ISDIR(s.st_mode) is True
+    assert stat.S_IMODE(s.st_mode) == 0o700
+
+
+def test_shredder_deletes_symlinks(journalist_app, caplog):
+    """
+    Confirm that `store.clear_shredder` removes any symlinks in the shredder.
+    """
+    caplog.set_level(logging.DEBUG)
+
+    link_target = "/foo"
+    link = os.path.abspath(os.path.join(journalist_app.storage.shredder_path, "foo"))
+    os.symlink(link_target, link)
+    journalist_app.storage.clear_shredder()
+    assert "Deleting link {} to {}".format(link, link_target) in caplog.text
+    assert not os.path.exists(link)
+
+
+def test_shredder_shreds(journalist_app, caplog):
+    """
+    Confirm that `store.clear_shredder` removes files.
+    """
+    caplog.set_level(logging.DEBUG)
+
+    testdir = os.path.abspath(os.path.join(journalist_app.storage.shredder_path, "testdir"))
+    os.makedirs(testdir)
+    testfile = os.path.join(testdir, "testfile")
+    with open(testfile, "w") as f:
+        f.write("testdata\n")
+
+    journalist_app.storage.clear_shredder()
+    assert "Securely deleted file 1/1: {}".format(testfile) in caplog.text
+    assert not os.path.isfile(testfile)
+    assert not os.path.isdir(testdir)
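The shredder tests above reduce to a small contract: anything placed under `storage.shredder_path` is destroyed by `clear_shredder()`, and symlinks are unlinked rather than followed. A minimal sketch of that contract; `journalist_app` here stands in for the configured app fixture these tests use, so this is an illustration rather than part of the patch's test suite:

    import os

    # Place a file inside the shredder directory...
    doomed = os.path.join(journalist_app.storage.shredder_path, 'doomed.txt')
    with open(doomed, 'w') as f:
        f.write('sensitive\n')

    # ...and clear_shredder() securely deletes everything found there.
    journalist_app.storage.clear_shredder()
    assert not os.path.exists(doomed)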
diff --git a/securedrop/tests/test_submission_cleanup.py b/securedrop/tests/test_submission_cleanup.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_submission_cleanup.py
@@ -0,0 +1,71 @@
+import argparse
+import os
+
+from db import db
+from management import submissions
+from models import Submission
+
+from tests import utils
+
+
+def test_delete_disconnected_db_submissions(journalist_app, config):
+    """
+    Test that Submission records without corresponding files are deleted.
+    """
+    with journalist_app.app_context():
+        source, _ = utils.db_helper.init_source()
+        source_id = source.id
+
+        # make two submissions
+        utils.db_helper.submit(source, 2)
+        submission_id = source.submissions[0].id
+
+        # remove one submission's file
+        f1 = os.path.join(config.STORE_DIR, source.filesystem_id, source.submissions[0].filename)
+        assert os.path.exists(f1)
+        os.remove(f1)
+        assert os.path.exists(f1) is False
+
+        # check that the single disconnect is seen
+        disconnects = submissions.find_disconnected_db_submissions(config.STORE_DIR)
+        assert len(disconnects) == 1
+        assert disconnects[0].filename == source.submissions[0].filename
+
+        # remove the disconnected Submission
+        args = argparse.Namespace(force=True, store_dir=config.STORE_DIR)
+        submissions.delete_disconnected_db_submissions(args)
+
+        assert db.session.query(Submission).filter(Submission.id == submission_id).count() == 0
+        assert db.session.query(Submission).filter(Submission.source_id == source_id).count() == 1
+
+
+def test_delete_disconnected_fs_submissions(journalist_app, config):
+    """
+    Test that files in the store without corresponding Submission records are deleted.
+ """ + source, _ = utils.db_helper.init_source() + + # make two submissions + utils.db_helper.submit(source, 2) + source_filesystem_id = source.filesystem_id + submission_filename = source.submissions[0].filename + disconnect_path = os.path.join(config.STORE_DIR, source_filesystem_id, submission_filename) + + # make two replies, to make sure that their files are not seen + # as disconnects + journalist, _ = utils.db_helper.init_journalist("Mary", "Lane") + utils.db_helper.reply(journalist, source, 2) + + # delete the first Submission record + db.session.delete(source.submissions[0]) + db.session.commit() + + disconnects = submissions.find_disconnected_fs_submissions(config.STORE_DIR) + assert len(disconnects) == 1 + assert disconnects[0] == disconnect_path + assert os.path.exists(disconnect_path) + + args = argparse.Namespace(force=True, store_dir=config.STORE_DIR) + submissions.delete_disconnected_fs_submissions(args) + + assert os.path.exists(disconnect_path) is False diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py --- a/securedrop/tests/test_template_filters.py +++ b/securedrop/tests/test_template_filters.py @@ -1,120 +1,120 @@ # -*- coding: utf-8 -*- -import argparse -import logging -from datetime import datetime, timedelta import os +from datetime import datetime +from datetime import timedelta -from flask import session - -os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config +from db import db import i18n +import i18n_tool import journalist_app -import manage import source_app import template_filters -import version - - -class TestTemplateFilters(object): - - def get_fake_config(self): - class Config: - def __getattr__(self, name): - return getattr(config, name) - return Config() - - def verify_rel_datetime_format(self, app): - with app.test_client() as c: - c.get('/') - assert session.get('locale') is None - result = template_filters.rel_datetime_format( - datetime(2016, 1, 1, 1, 1, 1)) - assert "Jan 01, 2016 01:01 AM" == result - - result = template_filters.rel_datetime_format( - datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy") - assert "2016" == result - - test_time = datetime.utcnow() - timedelta(hours=2) - result = template_filters.rel_datetime_format(test_time, - relative=True) - assert "2 hours ago" == result - - c.get('/?l=fr_FR') - assert session.get('locale') == 'fr_FR' - result = template_filters.rel_datetime_format( - datetime(2016, 1, 1, 1, 1, 1)) - assert "janv. 
01, 2016 01:01 AM" == result - - result = template_filters.rel_datetime_format( - datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy") - assert "2016" == result - - test_time = datetime.utcnow() - timedelta(hours=2) - result = template_filters.rel_datetime_format(test_time, - relative=True) - assert "2 heures" in result - - def verify_filesizeformat(self, app): - with app.test_client() as c: - c.get('/') - assert session.get('locale') is None - assert "1 byte" == template_filters.filesizeformat(1) - assert "2 bytes" == template_filters.filesizeformat(2) - value = 1024 * 3 - assert "3 kB" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 MB" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 GB" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 TB" == template_filters.filesizeformat(value) - value *= 1024 - assert "3,072 TB" == template_filters.filesizeformat(value) - - c.get('/?l=fr_FR') - assert session.get('locale') == 'fr_FR' - assert "1 octet" == template_filters.filesizeformat(1) - assert "2 octets" == template_filters.filesizeformat(2) - value = 1024 * 3 - assert "3 ko" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 Mo" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 Go" == template_filters.filesizeformat(value) - value *= 1024 - assert "3 To" == template_filters.filesizeformat(value) - value *= 1024 - assert "072 To" in template_filters.filesizeformat(value) - - def test_filters(self): - sources = [ - 'tests/i18n/code.py', - ] - kwargs = { - 'translations_dir': config.TEMP_DIR, - 'mapping': 'tests/i18n/babel.cfg', - 'source': sources, - 'extract_update': True, - 'compile': True, - 'verbose': logging.DEBUG, - 'version': version.__version__, - } - args = argparse.Namespace(**kwargs) - manage.setup_verbosity(args) - manage.translate_messages(args) - - manage.sh(""" - pybabel init -i {d}/messages.pot -d {d} -l en_US - pybabel init -i {d}/messages.pot -d {d} -l fr_FR - """.format(d=config.TEMP_DIR)) - - fake_config = self.get_fake_config() - fake_config.SUPPORTED_LOCALES = ['en_US', 'fr_FR'] - fake_config.TRANSLATION_DIRS = config.TEMP_DIR - for app in (journalist_app.create_app(fake_config), - source_app.create_app(fake_config)): - assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES - self.verify_filesizeformat(app) - self.verify_rel_datetime_format(app) +from flask import session +from sh import pybabel +from .utils.env import TESTS_DIR + +os.environ['SECUREDROP_ENV'] = 'test' # noqa + + +def verify_rel_datetime_format(app): + with app.test_client() as c: + c.get('/') + assert session.get('locale') is None + result = template_filters.rel_datetime_format( + datetime(2016, 1, 1, 1, 1, 1)) + assert "Jan 01, 2016 01:01 AM" == result + + result = template_filters.rel_datetime_format( + datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy") + assert "2016" == result + + test_time = datetime.utcnow() - timedelta(hours=2) + result = template_filters.rel_datetime_format(test_time, + relative=True) + assert "2 hours ago" == result + + c.get('/?l=fr_FR') + assert session.get('locale') == 'fr_FR' + result = template_filters.rel_datetime_format( + datetime(2016, 1, 1, 1, 1, 1)) + assert "janv. 
01, 2016 01:01 AM" == result + + result = template_filters.rel_datetime_format( + datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy") + assert "2016" == result + + test_time = datetime.utcnow() - timedelta(hours=2) + result = template_filters.rel_datetime_format(test_time, + relative=True) + assert "2 heures" in result + + +def verify_filesizeformat(app): + with app.test_client() as c: + c.get('/') + assert session.get('locale') is None + assert "1 byte" == template_filters.filesizeformat(1) + assert "2 bytes" == template_filters.filesizeformat(2) + value = 1024 * 3 + assert "3 kB" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 MB" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 GB" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 TB" == template_filters.filesizeformat(value) + value *= 1024 + assert "3,072 TB" == template_filters.filesizeformat(value) + + c.get('/?l=fr_FR') + assert session.get('locale') == 'fr_FR' + assert "1 octet" == template_filters.filesizeformat(1) + assert "2 octets" == template_filters.filesizeformat(2) + value = 1024 * 3 + assert "3 ko" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 Mo" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 Go" == template_filters.filesizeformat(value) + value *= 1024 + assert "3 To" == template_filters.filesizeformat(value) + value *= 1024 + assert "072 To" in template_filters.filesizeformat(value) + + +# We can't use fixtures because these options are set at app init time, and we +# can't modify them after. +def test_source_filters(config): + do_test(config, source_app.create_app) + + +# We can't use fixtures because these options are set at app init time, and we +# can't modify them after. +def test_journalist_filters(config): + do_test(config, journalist_app.create_app) + + +def do_test(config, create_app): + config.SUPPORTED_LOCALES = ['en_US', 'fr_FR'] + config.TRANSLATION_DIRS = config.TEMP_DIR + i18n_tool.I18NTool().main([ + '--verbose', + 'translate-messages', + '--mapping', os.path.join(TESTS_DIR, 'i18n/babel.cfg'), + '--translations-dir', config.TEMP_DIR, + '--sources', os.path.join(TESTS_DIR, 'i18n/code.py'), + '--extract-update', + '--compile', + ]) + + for l in ('en_US', 'fr_FR'): + pot = os.path.join(config.TEMP_DIR, 'messages.pot') + pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', l) + + app = create_app(config) + with app.app_context(): + db.create_all() + + assert i18n.LOCALES == config.SUPPORTED_LOCALES + verify_filesizeformat(app) + verify_rel_datetime_format(app) diff --git a/securedrop/tests/test_worker.py b/securedrop/tests/test_worker.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_worker.py @@ -0,0 +1,168 @@ +import logging +import os +import signal +import subprocess +import time + +import worker +from rq.worker import WorkerStatus + + +def layabout(): + """ + Function that just sleeps for an hour. + """ + time.sleep(3600) + + +def start_rq_worker(config, queue_name=None): + """ + Launches an rq worker process. + """ + if queue_name is None: + queue_name = config.RQ_WORKER_NAME + return subprocess.Popen( + [ + "/opt/venvs/securedrop-app-code/bin/rqworker", + "--path", + config.SECUREDROP_ROOT, + queue_name + ], + preexec_fn=os.setsid + ) + + +def test_no_interrupted_jobs(caplog): + """ + Tests requeue_interrupted_jobs when there are no interrupted jobs. 
+    """
+    caplog.set_level(logging.DEBUG)
+
+    q = worker.create_queue()
+    try:
+        assert len(q.get_job_ids()) == 0
+        worker.requeue_interrupted_jobs()
+        assert "No interrupted jobs found in started job registry." in caplog.text
+    finally:
+        q.delete()
+
+
+def test_job_interruption(config, caplog):
+    """
+    Tests that a job is requeued unless it is already being run.
+    """
+    caplog.set_level(logging.DEBUG)
+
+    queue_name = "test_job_interruption"
+    q = worker_process = None
+    try:
+        q = worker.create_queue(queue_name)
+
+        # submit a job that sleeps for an hour
+        job = q.enqueue(layabout)
+        assert len(q.get_job_ids()) == 1
+
+        # launch worker processes
+        worker_process = start_rq_worker(config, queue_name)
+
+        i = 0
+        while i < 20:
+            if len(worker.rq_workers(q)) == 1:
+                break
+            i += 1
+            time.sleep(0.1)
+
+        assert len(worker.rq_workers(q)) == 1
+
+        i = 0
+        while i < 20:
+            w = worker.worker_for_job(job.id)
+            if w:
+                break
+            i += 1
+            time.sleep(0.1)
+        assert w is not None
+
+        # the running job should not be requeued
+        worker.requeue_interrupted_jobs(queue_name)
+        skipped = "Skipping job {}, which is already being run by worker {}".format(job.id, w.key)
+        assert skipped in caplog.text
+
+        # kill the process group, to kill the worker and its workhorse
+        os.killpg(worker_process.pid, signal.SIGKILL)
+        worker_process.wait()
+        caplog.clear()
+
+        # after killing the worker, the interrupted job should be requeued
+        worker.requeue_interrupted_jobs(queue_name)
+        print(caplog.text)
+        assert "Requeuing job {}".format(job) in caplog.text
+        assert len(q.get_job_ids()) == 1
+    finally:
+        q.delete()
+        if worker_process:
+            try:
+                os.killpg(worker_process.pid, 0)
+                os.killpg(worker_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("worker_process already gone.")
+
+
+def test_worker_for_job(config):
+    """
+    Tests that worker_for_job works when there are multiple workers.
+    """
+
+    queue_name = "test_worker_for_job"
+    q = worker_process = second_process = None
+    try:
+        q = worker.create_queue(queue_name)
+        assert len(worker.rq_workers(q)) == 0
+
+        # launch worker processes
+        worker_process = start_rq_worker(config, queue_name)
+        second_process = start_rq_worker(config, queue_name)
+
+        i = 0
+        while i < 20:
+            if len(worker.rq_workers(q)) == 2:
+                break
+            i += 1
+            time.sleep(0.1)
+
+        assert len(worker.rq_workers(q)) == 2
+
+        worker.rq_workers(q)[0].set_state(WorkerStatus.SUSPENDED)
+
+        logging.debug(
+            [
+                "{}: state={}, job={}".format(w.pid, w.get_state(), w.get_current_job_id())
+                for w in worker.rq_workers(q)
+            ]
+        )
+
+        # submit a job that sleeps for an hour
+        job = q.enqueue(layabout)
+
+        i = 0
+        while i < 20:
+            w = worker.worker_for_job(job.id)
+            if w:
+                break
+            i += 1
+            time.sleep(0.1)
+        assert w is not None
+
+    finally:
+        q.delete()
+        if worker_process:
+            try:
+                os.killpg(worker_process.pid, 0)
+                os.killpg(worker_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("worker_process already gone.")
+
+        if second_process:
+            try:
+                os.killpg(second_process.pid, 0)
+                os.killpg(second_process.pid, signal.SIGKILL)
+            except OSError:
+                logging.debug("second_process already gone.")
diff --git a/securedrop/tests/utils/__init__.py b/securedrop/tests/utils/__init__.py
--- a/securedrop/tests/utils/__init__.py
+++ b/securedrop/tests/utils/__init__.py
@@ -1,3 +1,18 @@
-import async  # noqa
-import db_helper  # noqa
-import env  # noqa
+# -*- coding: utf-8 -*-
+
+from flask import g
+from pyotp import TOTP
+
+from . import asynchronous  # noqa
+from . import db_helper  # noqa
+from . import env  # noqa
+
+
+def login_user(app, test_user):
+    resp = app.post('/login',
+                    data={'username': test_user['username'],
+                          'password': test_user['password'],
+                          'token': TOTP(test_user['otp_secret']).now()},
+                    follow_redirects=True)
+    assert resp.status_code == 200
+    assert hasattr(g, 'user')  # ensure logged in
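A minimal usage sketch for the `login_user` helper defined above; the `journalist_app` fixture and the `test_journo` credentials dict are assumptions, mirroring the fixtures used elsewhere in this suite:

    from tests.utils import login_user

    # Authenticate a test client before exercising a protected view.
    with journalist_app.test_client() as app:
        login_user(app, test_journo)  # asserts a 200 response and a populated g.user
        resp = app.get('/')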
diff --git a/securedrop/tests/utils/api_helper.py b/securedrop/tests/utils/api_helper.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/utils/api_helper.py
@@ -0,0 +1,8 @@
+def get_api_headers(token=""):
+    if token:
+        return {
+            "Authorization": "Token {}".format(token),
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+    return {"Accept": "application/json", "Content-Type": "application/json"}
diff --git a/securedrop/tests/utils/async.py b/securedrop/tests/utils/asynchronous.py
similarity index 94%
rename from securedrop/tests/utils/async.py
rename to securedrop/tests/utils/asynchronous.py
--- a/securedrop/tests/utils/async.py
+++ b/securedrop/tests/utils/asynchronous.py
@@ -4,6 +4,7 @@
 """
 import time

+# This is an arbitrarily defined value in the SD codebase and not something from rqworker
 REDIS_SUCCESS_RETURN_VALUE = 'success'
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -2,33 +2,40 @@
 """Testing utilities that involve database (and often related filesystem)
 interaction.
 """
+import datetime
 import mock
 import os

+from flask import current_app
+
 os.environ['SECUREDROP_ENV'] = 'test'  # noqa
-import config
-import crypto_util
-import db
-import store
+from sdconfig import config
+import models
+
+from db import db


-# db.{Journalist, Reply}
+# models.{Journalist, Reply}


-def init_journalist(is_admin=False):
+def init_journalist(first_name=None, last_name=None, is_admin=False):
     """Initialize a journalist into the database. Return their
-    :class:`db.Journalist` object and password string.
+    :class:`models.Journalist` object and password string.

     :param bool is_admin: Whether the user is an admin.

-    :returns: A 2-tuple. The first entry, an :obj:`db.Journalist`
+    :returns: A 2-tuple. The first entry, an :obj:`models.Journalist`
               corresponding to the row just added to the database. The
               second, their password string.
     """
-    username = crypto_util.genrandomid()
-    user_pw = crypto_util.genrandomid()
-    user = db.Journalist(username, user_pw, is_admin)
-    db.db_session.add(user)
-    db.db_session.commit()
+    username = current_app.crypto_util.genrandomid()
+    user_pw = current_app.crypto_util.genrandomid()
+    user = models.Journalist(username=username,
+                             password=user_pw,
+                             first_name=first_name,
+                             last_name=last_name,
+                             is_admin=is_admin)
+    db.session.add(user)
+    db.session.commit()
     return user, user_pw

@@ -36,14 +43,14 @@ def reply(journalist, source, num_replies):
     """Generates and submits *num_replies* replies to *source*
     from *journalist*. Returns reply objects as a list.

-    :param db.Journalist journalist: The journalist to write the
+    :param models.Journalist journalist: The journalist to write the
                                      reply from.

-    :param db.Source source: The source to send the reply to.
+    :param models.Source source: The source to send the reply to.

     :param int num_replies: Number of random-data replies to make.

-    :returns: A list of the :class:`db.Reply`s submitted.
+    :returns: A list of the :class:`models.Reply`s submitted.
""" assert num_replies >= 1 replies = [] @@ -51,17 +58,17 @@ def reply(journalist, source, num_replies): source.interaction_count += 1 fname = "{}-{}-reply.gpg".format(source.interaction_count, source.journalist_filename) - crypto_util.encrypt(str(os.urandom(1)), - [ - crypto_util.getkey(source.filesystem_id), - config.JOURNALIST_KEY - ], - store.path(source.filesystem_id, fname)) - reply = db.Reply(journalist, source, fname) + current_app.crypto_util.encrypt( + str(os.urandom(1)), + [current_app.crypto_util.getkey(source.filesystem_id), + config.JOURNALIST_KEY], + current_app.storage.path(source.filesystem_id, fname)) + + reply = models.Reply(journalist, source, fname) replies.append(reply) - db.db_session.add(reply) + db.session.add(reply) - db.db_session.commit() + db.session.commit() return replies @@ -72,7 +79,7 @@ def mock_verify_token(testcase): :param unittest.TestCase testcase: The test case for which to patch TOTP verification. """ - patcher = mock.patch('db.Journalist.verify_token') + patcher = mock.patch('models.Journalist.verify_token') testcase.addCleanup(patcher.stop) testcase.mock_journalist_verify_token = patcher.start() testcase.mock_journalist_verify_token.return_value = True @@ -81,33 +88,33 @@ def mock_verify_token(testcase): def mark_downloaded(*submissions): """Mark *submissions* as downloaded in the database. - :param db.Submission submissions: One or more submissions that + :param models.Submission submissions: One or more submissions that should be marked as downloaded. """ for submission in submissions: submission.downloaded = True - db.db_session.commit() + db.session.commit() -# db.{Source,Submission} +# models.{Source,Submission} def init_source_without_keypair(): """Initialize a source: create their database record and the filesystem directory that stores their submissions & replies. Return a source object and their codename string. - :returns: A 2-tuple. The first entry, the :class:`db.Source` + :returns: A 2-tuple. The first entry, the :class:`models.Source` initialized. The second, their codename string. """ # Create source identity and database record - codename = crypto_util.genrandomid() - filesystem_id = crypto_util.hash_codename(codename) - journalist_filename = crypto_util.display_id() - source = db.Source(filesystem_id, journalist_filename) - db.db_session.add(source) - db.db_session.commit() + codename = current_app.crypto_util.genrandomid() + filesystem_id = current_app.crypto_util.hash_codename(codename) + journalist_filename = current_app.crypto_util.display_id() + source = models.Source(filesystem_id, journalist_filename) + db.session.add(source) + db.session.commit() # Create the directory to store their submissions and replies - os.mkdir(store.path(source.filesystem_id)) + os.mkdir(current_app.storage.path(source.filesystem_id)) return source, codename @@ -118,50 +125,53 @@ def init_source(): and their GPG key encrypted with their codename. Return a source object and their codename string. - :returns: A 2-tuple. The first entry, the :class:`db.Source` + :returns: A 2-tuple. The first entry, the :class:`models.Source` initialized. The second, their codename string. 
""" source, codename = init_source_without_keypair() - crypto_util.genkeypair(source.filesystem_id, codename) + current_app.crypto_util.genkeypair(source.filesystem_id, codename) return source, codename def submit(source, num_submissions): """Generates and submits *num_submissions* - :class:`db.Submission`s on behalf of a :class:`db.Source` + :class:`models.Submission`s on behalf of a :class:`models.Source` *source*. - :param db.Source source: The source on who's behalf to make + :param models.Source source: The source on who's behalf to make submissions. :param int num_submissions: Number of random-data submissions to make. - :returns: A list of the :class:`db.Submission`s submitted. + :returns: A list of the :class:`models.Submission`s submitted. """ assert num_submissions >= 1 + source.last_updated = datetime.datetime.utcnow() + db.session.add(source) submissions = [] for _ in range(num_submissions): source.interaction_count += 1 - fpath = store.save_message_submission(source.filesystem_id, - source.interaction_count, - source.journalist_filename, - str(os.urandom(1))) - submission = db.Submission(source, fpath) + source.pending = False + fpath = current_app.storage.save_message_submission( + source.filesystem_id, + source.interaction_count, + source.journalist_filename, + str(os.urandom(1)) + ) + submission = models.Submission(source, fpath) submissions.append(submission) - db.db_session.add(submission) + db.session.add(source) + db.session.add(submission) - db.db_session.commit() + db.session.commit() return submissions def new_codename(client, session): """Helper function to go through the "generate codename" flow. """ - # clear the session because our tests have implicit reliance on each other - session.clear() - client.get('/generate') codename = session['codename'] client.post('/create') diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py --- a/securedrop/tests/utils/env.py +++ b/securedrop/tests/utils/env.py @@ -1,22 +1,28 @@ # -*- coding: utf-8 -*- """Testing utilities related to setup and teardown of test environment. """ +import io import os -from os.path import abspath, dirname, exists, isdir, join, realpath import shutil -import subprocess import threading +from distutils.version import StrictVersion +from os.path import abspath +from os.path import dirname +from os.path import isdir +from os.path import join +from os.path import realpath -import gnupg +import pretty_bad_protocol as gnupg +from db import db +from sdconfig import config os.environ['SECUREDROP_ENV'] = 'test' # noqa -import config -import crypto_util -from db import init_db, db_session -FILES_DIR = abspath(join(dirname(realpath(__file__)), '..', 'files')) -# TODO: the PID file for the redis worker is hard-coded below. Ideally this +TESTS_DIR = abspath(join(dirname(realpath(__file__)), '..')) +FILES_DIR = join(TESTS_DIR, 'files') + +# The PID file for the redis worker is hard-coded below. Ideally this # constant would be provided by a test harness. It has been intentionally # omitted from `config.py.example` in order to isolate the test vars from prod # vars. When refactoring the test suite, the test_worker_pidfile @@ -37,11 +43,24 @@ def init_gpg(): """Initialize the GPG keyring and import the journalist key for testing. 
""" - gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR) + + # gpg 2.1+ requires gpg-agent, see #4013 + gpg_agent_config = os.path.join(config.GPG_KEY_DIR, 'gpg-agent.conf') + with open(gpg_agent_config, 'w+') as f: + f.write('allow-loopback-pinentry') + + gpg_binary = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR) + if StrictVersion(gpg_binary.binary_version) >= StrictVersion('2.1'): + gpg = gnupg.GPG(binary='gpg2', + homedir=config.GPG_KEY_DIR, + options=['--pinentry-mode loopback']) + else: + gpg = gpg_binary + # Faster to import a pre-generated key than to gen a new one every time. for keyfile in (join(FILES_DIR, "test_journalist_key.pub"), join(FILES_DIR, "test_journalist_key.sec")): - gpg.import_keys(open(keyfile).read()) + gpg.import_keys(io.open(keyfile).read()) return gpg @@ -49,9 +68,7 @@ def setup(): """Set up the file system, GPG, and database.""" create_directories() init_gpg() - init_db() - # Do tests that should always run on app startup - crypto_util.do_runtime_tests() + db.create_all() def teardown(): @@ -61,11 +78,16 @@ def teardown(): for t in threading.enumerate(): if t.is_alive() and not isinstance(t, threading._MainThread): t.join() - db_session.remove() - shutil.rmtree(config.TEMP_DIR) + db.session.remove() + try: + shutil.rmtree(config.TEMP_DIR) + except OSError: + # Then check the directory was already deleted + assert not os.path.exists(config.TEMP_DIR) try: shutil.rmtree(config.SECUREDROP_DATA_ROOT) - assert not os.path.exists(config.SECUREDROP_DATA_ROOT) # safeguard for #844 + # safeguard for #844 + assert not os.path.exists(config.SECUREDROP_DATA_ROOT) except OSError as exc: - if 'No such file or directory' not in exc: + if 'No such file or directory' != exc.strerror: raise diff --git a/securedrop/tests/utils/instrument.py b/securedrop/tests/utils/instrument.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/utils/instrument.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +""" +Taken from: flask_testing.utils + +Flask unittest integration. + +:copyright: (c) 2010 by Dan Jacob. +:license: BSD, see LICENSE for more details. +""" + + +from urllib.parse import urlparse, urljoin + +import pytest + +from flask import template_rendered, message_flashed + + +__all__ = ['InstrumentedApp'] + + +class ContextVariableDoesNotExist(Exception): + pass + + +class InstrumentedApp: + + def __init__(self, app): + self.app = app + + def __enter__(self): + self.templates = [] + self.flashed_messages = [] + template_rendered.connect(self._add_template) + message_flashed.connect(self._add_flash_message) + return self + + def __exit__(self, *nargs): + if getattr(self, 'app', None) is not None: + del self.app + + del self.templates[:] + del self.flashed_messages[:] + + template_rendered.disconnect(self._add_template) + message_flashed.disconnect(self._add_flash_message) + + def _add_flash_message(self, app, message, category): + self.flashed_messages.append((message, category)) + + def _add_template(self, app, template, context): + if len(self.templates) > 0: + self.templates = [] + self.templates.append((template, context)) + + def assert_message_flashed(self, message, category='message'): + """ + Checks if a given message was flashed. 
diff --git a/testinfra/ansible/test_validate_users.py b/testinfra/ansible/test_validate_users.py
deleted file mode 100644
--- a/testinfra/ansible/test_validate_users.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import pytest
-import os
-
-
[email protected](reason="Validation not fully implemented yet")
[email protected]('username', [
-    'root',
-    'amnesia',
-])
-def test_validate_users(LocalCommand, username):
-    """
-    Check that Ansible halts execution of the playbook if the Admin
-    username is set to any disallowed value.
- """ - var_override = "--tags validate --extra-vars ssh_users={}".format(username) - os.environ['ANSIBLE_ARGS'] = var_override - c = LocalCommand("vagrant provision /staging/") - - assert c.rc != 0 diff --git a/testinfra/app-code/test_redis_worker.py b/testinfra/app-code/test_redis_worker.py deleted file mode 100644 --- a/testinfra/app-code/test_redis_worker.py +++ /dev/null @@ -1,44 +0,0 @@ -import pytest -import re - - -securedrop_test_vars = pytest.securedrop_test_vars - - [email protected]('config_line', [ - '[program:securedrop_worker]', - 'command=/usr/local/bin/rqworker', - "directory={}".format(securedrop_test_vars.securedrop_code), - 'autostart=true', - 'autorestart=true', - 'startretries=3', - 'stderr_logfile=/var/log/securedrop_worker/err.log', - 'stdout_logfile=/var/log/securedrop_worker/out.log', - "user={}".format(securedrop_test_vars.securedrop_user), - 'environment=HOME="/tmp/python-gnupg"', -]) -def test_redis_worker_configuration(File, config_line): - """ - Ensure SecureDrop Redis worker config for supervisor service - management is configured correctly. - """ - f = File('/etc/supervisor/conf.d/securedrop_worker.conf') - # Config lines may have special characters such as [] which will - # throw off the regex matching, so let's escape those chars. - regex = re.escape(config_line) - assert f.contains('^{}$'.format(regex)) - - -def test_redis_worker_config_file(File): - """ - Ensure SecureDrop Redis worker config for supervisor service - management has proper ownership and mode. - - Using separate test so that the parametrization doesn't rerun - the file mode checks, which would be useless. - """ - f = File('/etc/supervisor/conf.d/securedrop_worker.conf') - assert f.is_file - assert oct(f.mode) == '0644' - assert f.user == "root" - assert f.group == "root" diff --git a/testinfra/app/test_apparmor.py b/testinfra/app/test_apparmor.py deleted file mode 100644 --- a/testinfra/app/test_apparmor.py +++ /dev/null @@ -1,119 +0,0 @@ -import pytest - - -sdvars = pytest.securedrop_test_vars - - [email protected]('pkg', ['apparmor', 'apparmor-utils']) -def test_apparmor_pkg(Package, pkg): - """ Apparmor package dependencies """ - assert Package(pkg).is_installed - - -def test_apparmor_enabled(Command, Sudo): - """ Check that apparmor is enabled """ - with Sudo(): - assert Command("aa-status --enabled").rc == 0 - - -apache2_capabilities = [ - 'dac_override', - 'kill', - 'net_bind_service', - 'sys_ptrace' - ] - - [email protected]('cap', apache2_capabilities) -def test_apparmor_apache_capabilities(Command, cap): - """ check for exact list of expected app-armor capabilities for apache2 """ - c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' " - "/etc/apparmor.d/usr.sbin.apache2") - assert cap in c.stdout - - -def test_apparmor_apache_exact_capabilities(Command): - """ ensure no extra capabilities are defined for apache2 """ - c = Command.check_output("grep -ic capability " - "/etc/apparmor.d/usr.sbin.apache2") - assert str(len(apache2_capabilities)) == c - - -tor_capabilities = ['setgid'] - - [email protected]('cap', tor_capabilities) -def test_apparmor_tor_capabilities(Command, cap): - """ check for exact list of expected app-armor capabilities for tor """ - c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && " - "say $1\' /etc/apparmor.d/usr.sbin.tor") - assert cap in c.stdout - - -def test_apparmor_tor_exact_capabilities(Command): - """ ensure no extra capabilities are defined for tor """ - c = Command.check_output("grep -ic capability " - "/etc/apparmor.d/usr.sbin.tor") 
- assert str(len(tor_capabilities)) == c - - -enforced_profiles = [ - 'ntpd', - 'apache2', - 'tcpdump', - 'tor'] - - [email protected]('profile', enforced_profiles) -def test_apparmor_ensure_not_disabled(File, Sudo, profile): - """ Explicitly check that enforced profiles are NOT in - /etc/apparmor.d/disable - Polling aa-status only checks the last config that was loaded, - this ensures it wont be disabled on reboot. - """ - f = File("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile)) - with Sudo(): - assert not f.exists - - [email protected]('complain_pkg', sdvars.apparmor_complain) -def test_app_apparmor_complain(Command, Sudo, complain_pkg): - """ Ensure app-armor profiles are in complain mode for staging """ - with Sudo(): - awk = ("awk '/[0-9]+ profiles.*complain." - "/{flag=1;next}/^[0-9]+.*/{flag=0}flag'") - c = Command.check_output("aa-status | {}".format(awk)) - assert complain_pkg in c - - -def test_app_apparmor_complain_count(Command, Sudo): - """ Ensure right number of app-armor profiles are in complain mode """ - with Sudo(): - c = Command.check_output("aa-status --complaining") - assert c == str(len(sdvars.apparmor_complain)) - - [email protected]('aa_enforced', sdvars.apparmor_enforce) -def test_apparmor_enforced(Command, Sudo, aa_enforced): - awk = ("awk '/[0-9]+ profiles.*enforce./" - "{flag=1;next}/^[0-9]+.*/{flag=0}flag'") - with Sudo(): - c = Command.check_output("aa-status | {}".format(awk)) - assert aa_enforced in c - - -def test_apparmor_total_profiles(Command, Sudo): - """ Ensure number of total profiles is sum of enforced and - complaining profiles """ - with Sudo(): - total_expected = str((len(sdvars.apparmor_enforce) - + len(sdvars.apparmor_complain))) - assert Command.check_output("aa-status --profiled") == total_expected - - -def test_aastatus_unconfined(Command, Sudo): - """ Ensure that there are no processes that are unconfined but have - a profile """ - unconfined_chk = "0 processes are unconfined but have a profile defined" - with Sudo(): - assert unconfined_chk in Command("aa-status").stdout diff --git a/testinfra/app/test_appenv.py b/testinfra/app/test_appenv.py deleted file mode 100644 --- a/testinfra/app/test_appenv.py +++ /dev/null @@ -1,80 +0,0 @@ -import pytest - -sdvars = pytest.securedrop_test_vars - - [email protected]('exp_pip_pkg', sdvars.pip_deps) -def test_app_pip_deps(PipPackage, exp_pip_pkg): - """ Ensure pip dependencies are installed """ - pip = PipPackage.get_packages() - assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version'] - - -def test_app_wsgi(File, Sudo): - """ ensure logging is enabled for source interface in staging """ - f = File("/var/www/source.wsgi") - with Sudo(): - assert f.is_file - assert oct(f.mode) == "0640" - assert f.user == 'www-data' - assert f.group == 'www-data' - assert f.contains("^import logging$") - assert f.contains("^logging\.basicConfig(stream=sys\.stderr)$") - - -def test_pidfile(File): - """ ensure there are no pid files """ - assert not File('/tmp/journalist.pid').exists - assert not File('/tmp/source.pid').exists - - [email protected]('app_dir', sdvars.app_directories) -def test_app_directories(File, Sudo, app_dir): - """ ensure securedrop app directories exist with correct permissions """ - f = File(app_dir) - with Sudo(): - assert f.is_directory - assert f.user == sdvars.securedrop_user - assert f.group == sdvars.securedrop_user - assert oct(f.mode) == "0700" - - -def test_app_code_pkg(Package): - """ ensure securedrop-app-code package is installed """ - assert 
Package("securedrop-app-code").is_installed - - -def test_gpg_key_in_keyring(Command, Sudo): - """ ensure test gpg key is present in app keyring """ - with Sudo(sdvars.securedrop_user): - c = Command("gpg --homedir /var/lib/securedrop/keys " - "--list-keys 28271441") - assert "pub 4096R/28271441 2013-10-12" in c.stdout - - -def test_ensure_logo(File, Sudo): - """ ensure default logo header file exists """ - f = File("{}/static/i/logo.png".format(sdvars.securedrop_code)) - with Sudo(): - assert oct(f.mode) == "0644" - assert f.user == sdvars.securedrop_user - assert f.group == sdvars.securedrop_user - - -def test_securedrop_tmp_clean_cron(Command, Sudo): - """ Ensure securedrop tmp clean cron job in place """ - with Sudo(): - cronlist = Command("crontab -l").stdout - cronjob = "@daily {}/manage.py clean-tmp".format( - sdvars.securedrop_code) - assert cronjob in cronlist - - -def test_app_workerlog_dir(File, Sudo): - """ ensure directory for worker logs is present """ - f = File('/var/log/securedrop_worker') - with Sudo(): - assert f.is_directory - assert f.user == "root" - assert f.group == "root" - assert oct(f.mode) == "0644" diff --git a/testinfra/common/test_system_hardening.py b/testinfra/common/test_system_hardening.py deleted file mode 100644 --- a/testinfra/common/test_system_hardening.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -import pytest -import re - -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - - [email protected]('sysctl_opt', [ - ('net.ipv4.conf.all.accept_redirects', 0), - ('net.ipv4.conf.all.accept_source_route', 0), - ('net.ipv4.conf.all.rp_filter', 1), - ('net.ipv4.conf.all.secure_redirects', 0), - ('net.ipv4.conf.all.send_redirects', 0), - ('net.ipv4.conf.default.accept_redirects', 0), - ('net.ipv4.conf.default.accept_source_route', 0), - ('net.ipv4.conf.default.rp_filter', 1), - ('net.ipv4.conf.default.secure_redirects', 0), - ('net.ipv4.conf.default.send_redirects', 0), - ('net.ipv4.icmp_echo_ignore_broadcasts', 1), - ('net.ipv4.ip_forward', 0), - ('net.ipv4.tcp_max_syn_backlog', 4096), - ('net.ipv4.tcp_syncookies', 1), - ('net.ipv6.conf.all.disable_ipv6', 1), - ('net.ipv6.conf.default.disable_ipv6', 1), - ('net.ipv6.conf.lo.disable_ipv6', 1), -]) -def test_sysctl_options(Sysctl, Sudo, sysctl_opt): - """ - Ensure sysctl flags are set correctly. Most of these checks - are disabling IPv6 and hardening IPv4, which is appropriate - due to the heavy use of Tor. - """ - with Sudo(): - assert Sysctl(sysctl_opt[0]) == sysctl_opt[1] - - -def test_dns_setting(File): - """ - Ensure DNS service is hard-coded in resolv.conf config. - """ - f = File('/etc/resolvconf/resolv.conf.d/base') - assert f.is_file - assert f.user == "root" - assert f.group == "root" - assert oct(f.mode) == "0644" - assert f.contains('^nameserver 8\.8\.8\.8$') - - [email protected]('kernel_module', [ - 'bluetooth', - 'iwlwifi', -]) -def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module): - """ - Test that unwanted kernel modules are blacklisted on the system. - Mostly these checks are defense-in-depth approaches to ensuring - that wireless interfaces will not work. - """ - with Sudo(): - assert kernel_module not in Command("lsmod").stdout - - f = File("/etc/modprobe.d/blacklist.conf") - assert f.contains("^blacklist {}$".format(kernel_module)) - - [email protected](hostenv.startswith('mon'), - reason="Monitor Server does not have swap disabled yet.") -def test_swap_disabled(Command): - """ - Ensure swap space is disabled. 
Prohibit writing memory to swapfiles - to reduce the threat of forensic analysis leaking any sensitive info. - """ - c = Command.check_output('swapon --summary') - # A leading slash will indicate full path to a swapfile. - assert not re.search("^/", c, re.M) - # Expect that ONLY the headers will be present in the output. - rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority") - assert re.search(rgx, c) diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py deleted file mode 100644 --- a/testinfra/common/test_user_config.py +++ /dev/null @@ -1,84 +0,0 @@ -import os -import pytest -import re - -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - - -def test_sudoers_config(File, Sudo): - """ - Check sudoers config for passwordless sudo via group membership, - as well as environment-related hardening. - """ - f = File("/etc/sudoers") - assert f.is_file - assert f.user == "root" - assert f.group == "root" - assert oct(f.mode) == "0440" - - # Restrictive file mode requires sudo for reading, so let's - # read once and store the content in a var. - with Sudo(): - sudoers_config = f.content - - # Using re.search rather than `f.contains` since the basic grep - # matching doesn't support PCRE, so `\s` won't work. - assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M) - assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M) - assert re.search('^Defaults\s+mail_badpass$', sudoers_config, re.M) - assert re.search('Defaults\s+secure_path="/usr/local/sbin:' - '/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', - sudoers_config, re.M) - assert re.search('^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$', - sudoers_config, re.M) - assert re.search('Defaults:%sudo\s+!requiretty', sudoers_config, re.M) - - -def test_sudoers_tmux_env(File): - """ - Ensure SecureDrop-specific bashrc additions are present. - This checks for automatic tmux start on interactive shells. - If we switch to byobu, we can set `byobu-enabled` and check - the corresponding settings there. - """ - - f = File('/etc/profile.d/securedrop_additions.sh') - non_interactive_str = re.escape('[[ $- != *i* ]] && return') - tmux_check = re.escape('test -z "$TMUX" && (tmux attach ||' - ' tmux new-session)') - - assert f.contains("^{}$".format(non_interactive_str)) - assert f.contains("^if which tmux >\/dev\/null 2>&1; then$") - - assert 'test -z "$TMUX" && (tmux attach || tmux new-session)' in f.content - assert f.contains(tmux_check) - - -def test_tmux_installed(Package): - """ - Ensure the `tmux` package is present, since it's required for the user env. - When running an interactive SSH session over Tor, tmux should be started - automatically, to prevent problems if the connection is broken - unexpectedly, as sometimes happens over Tor. The Admin will be able to - reconnect to the running tmux session and review command output. - """ - assert Package("tmux").is_installed - - [email protected](hostenv == 'travis', - reason="Bashrc tests dont make sense on Travis") -def test_sudoers_tmux_env_deprecated(File): - """ - Previous version of the Ansible config set the tmux config - in per-user ~/.bashrc, which was redundant. The config has - since moved to /etc/profile.d, to provide a single point of - update that applies to all users. Let's make sure that the - old setting isn't still active. - """ - - admin_user = "vagrant" - if os.environ.get("FPF_CI", None): - admin_user = "sdrop" - - f = File("/home/{}/.bashrc".format(admin_user)) - assert not f.contains("^. 
\/etc\/bashrc\.securedrop_additions$") diff --git a/testinfra/development/test_development_application_settings.py b/testinfra/development/test_development_application_settings.py deleted file mode 100644 --- a/testinfra/development/test_development_application_settings.py +++ /dev/null @@ -1,129 +0,0 @@ -import pytest -import os - -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - -sd_test_vars = pytest.securedrop_test_vars - - [email protected]('package', [ - "securedrop-app-code", - "apache2-mpm-worker", - "libapache2-mod-wsgi", - "libapache2-mod-xsendfile", -]) -def test_development_lacks_deb_packages(Command, package): - """ - The development machine does not use Apache, but rather the Flask runner, - for standing up dev-friendly servers inside the VM. Therefore the - app-code-related deb packages should be absent. - """ - # The TestInfra `Package` module doesn't offer state=absent checks, - # so let's call `dpkg -l` and inspect that output. - c = Command("dpkg -l {}".format(package)) - assert c.rc == 1 - assert c.stdout == "" - stderr = c.stderr.rstrip() - assert stderr == "dpkg-query: no packages found matching {}".format( - package) - - -def test_development_apparmor_no_complain_mode(Command, Sudo): - """ - Ensure that AppArmor profiles are not set to complain mode in development. - The app-staging host sets profiles to complain, viz. - - * usr.sbin.apache2 - * usr.sbin.tor - - but those changes should not land on the development machine. - """ - - with Sudo(): - c = Command("aa-status") - if hostenv == "travis": - assert c.rc == 3 - assert 'apparmor filesystem is not mounted' in c.stderr - else: - assert c.rc == 0 - assert '0 profiles are in complain mode.' in c.stdout - - [email protected]('unwanted_file', [ - "/var/www/html", - "/var/www/source.wsgi", - "/var/www/document.wsgi", -]) -def test_development_apache_docroot_absent(File, unwanted_file): - """ - Ensure the default HTML document root is missing. - Development environment does not serve out of /var/www, - since it uses the Flask dev server, not Apache. - """ - f = File(unwanted_file) - assert not f.exists - - [email protected]('data_dir', [ - "/var/lib/securedrop", - "/var/lib/securedrop/keys", - "/var/lib/securedrop/tmp", - "/var/lib/securedrop/store", -]) -def test_development_data_directories_exist(File, data_dir): - """ - Ensure that application code directories are created - under /vagrant for the development environment, rather than - /var/www as in staging and prod. - """ - f = File(data_dir) - assert f.is_directory - assert f.user == sd_test_vars.securedrop_user - assert f.group == sd_test_vars.securedrop_user - assert oct(f.mode) == "0700" - - -def test_development_app_directories_exist(File): - """ - Ensure that application code directories are created - under /vagrant for the development environment, rather than - /var/www as in staging and prod. - - Using a separate check from the data directories because /vagrant - will be mounted with different mode. - """ - f = File(sd_test_vars.securedrop_code) - assert f.is_directory - assert f.user == sd_test_vars.securedrop_user - assert f.group == sd_test_vars.securedrop_user - - -def test_development_clean_tmp_cron_job(Command, Sudo): - """ - Ensure cron job for cleaning the temporary directory for the app code - exists. Also, ensure that the older format for the cron job is absent, - since we updated manage.py subcommands to use hyphens instead of - underscores (e.g. `clean_tmp` -> `clean-tmp`). 
- """ - - with Sudo(): - c = Command.check_output('crontab -l') - assert "@daily {}/manage.py clean-tmp".format( - sd_test_vars.securedrop_code) in c - assert "@daily {}/manage.py clean_tmp".format( - sd_test_vars.securedrop_code) not in c - assert "clean_tmp".format(sd_test_vars.securedrop_code) not in c - # Make sure that the only cron lines are a comment and the actual job. - # We don't want any duplicates. - assert len(c.split("\n")) == 2 - - -def test_development_default_logo_exists(File): - """ - Checks for default SecureDrop logo file. - """ - - f = File("{}/static/i/logo.png".format(sd_test_vars.securedrop_code)) - assert f.is_file - assert f.user == sd_test_vars.securedrop_user - assert f.group == sd_test_vars.securedrop_user diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py deleted file mode 100644 --- a/testinfra/development/test_development_environment.py +++ /dev/null @@ -1,71 +0,0 @@ -import pytest -import getpass - - -def test_development_app_dependencies(Package): - """ - Ensure development apt dependencies are installed. - """ - development_apt_dependencies = [ - 'libssl-dev', - 'ntp', - 'python-dev', - 'python-pip', - ] - for dependency in development_apt_dependencies: - p = Package(dependency) - assert p.is_installed - - [email protected]('pip_package,version', [ - ('Flask-Testing', '0.7.1'), - ('Flask', '0.12.2'), - ('Jinja2', '2.10'), - ('MarkupSafe', '1.0'), - ('Werkzeug', '0.12.2'), - ('beautifulsoup4', '4.6.0'), - ('click', '6.7'), - ('coverage', '4.4.2'), - ('first', '2.0.1'), - ('funcsigs', '1.0.2'), - ('itsdangerous', '0.24'), - ('mock', '2.0.0'), - ('pbr', '3.1.1'), - ('pip-tools', '1.11.0'), - ('py', '1.5.2'), - ('pytest-cov', '2.5.1'), - ('pytest', '3.3.2'), - ('selenium', '2.53.6'), - ('six', '1.11.0'), -]) -def test_development_pip_dependencies(Command, Sudo, pip_package, version): - """ - Declare SecureDrop app pip requirements. On the development VM, - the pip dependencies should be installed directly via pip, rather - than relying on the deb packages with pip-wheel inclusions. - Versions here are intentionally hardcoded to track changes. - """ - # Using elevated privileges to list the Python packages, since - # the playbooks use sudo to install the pip packages system-wide. - # In Travis, lack of sudo here hides a number of dependencies. - with Sudo(): - c = Command('pip freeze') - assert "{}=={}".format(pip_package, version) in c.stdout.rstrip() - - [email protected](getpass.getuser() != 'vagrant', - reason="vagrant bashrc checks dont make sense in CI") -def test_development_securedrop_env_var(File): - """ - Ensure that the SECUREDROP_ENV var is set to "dev". - - - TODO: this isn't really checking that the env var is set, - just that it's declared in the bashrc. spec_helper ignores - env vars via ssh by default, so start there. 
- """ - - f = File('/home/vagrant/.bashrc') - assert f.is_file - assert f.user == 'vagrant' - assert f.contains('^export SECUREDROP_ENV=dev$') diff --git a/testinfra/development/test_development_networking.py b/testinfra/development/test_development_networking.py deleted file mode 100644 --- a/testinfra/development/test_development_networking.py +++ /dev/null @@ -1,62 +0,0 @@ -import pytest -import os - -hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] - - [email protected](hostenv == 'travis', - reason="Custom networking in Travis") -def test_development_iptables_rules(Command, Sudo): - """ - Declare desired iptables rules - The 'development' machine doesn't have any custom - iptables rules, so just check for the default chains. - """ - desired_iptables_rules = [ - '-P INPUT ACCEPT', - '-P FORWARD ACCEPT', - '-P OUTPUT ACCEPT', - ] - with Sudo(): - c = Command.check_output('iptables -S') - for rule in desired_iptables_rules: - assert rule in c - - # If any iptables rules are ever added, this test will - # fail, so tests can be written for the new rules. - # Counting newlines in the output simply to avoid calling - # `iptables -S` again and piping to `wc -l`. - assert c.count("\n") == len(desired_iptables_rules) - 1 - - -def test_development_ssh_listening(Socket): - """ - Check for ssh listening on all interfaces. In prod environment, - SSH will be listening only on localhost, i.e. SSH over ATHS. - """ - s = Socket("tcp://0.0.0.0:22") - assert s.is_listening - - -def test_development_redis_worker(Socket): - """ - Ensure that Redis worker is listening on localhost. - This worker is used to handle incoming submissions. - """ - - s = Socket("tcp://127.0.0.1:6379") - assert s.is_listening - -# The Flask runners for the source and journalist interfaces -# aren't configured to run by default, e.g. on boot. Nor -# do the app tests cause them to be run. So, we shouldn't -# really expected them to be running. -# check for source interface flask port listening -# describe port(8080) do -# it { should be_listening.on('0.0.0.0').with('tcp') } -# end -# -# check for journalist interface flask port listening -# describe port(8081) do -# it { should be_listening.on('0.0.0.0').with('tcp') } -# end diff --git a/testinfra/development/test_xvfb.py b/testinfra/development/test_xvfb.py deleted file mode 100644 --- a/testinfra/development/test_xvfb.py +++ /dev/null @@ -1,128 +0,0 @@ -import os - - -def test_xvfb_is_installed(Package): - """ - Ensure apt requirements for Xvfb are present. - """ - assert Package("xvfb").is_installed - - -def test_firefox_is_installed(Package, Command): - """ - The app test suite requires a very specific version of Firefox, for - compatibility with Selenium. Make sure to check the explicit - version of Firefox, not just that any version of Firefox is installed. - - In Travis, the Firefox installation is handled via the travis.yml - file, and so it won't show as installed via dpkg. - """ - if "TRAVIS" not in os.environ: - p = Package("firefox") - assert p.is_installed - - c = Command("firefox --version") - # Reminder: the rstrip is only necessary for local-context actions, - # e.g. in Travis, but it's a fine practice in all contexts. - assert c.stdout.rstrip() == "Mozilla Firefox 46.0.1" - - -def test_xvfb_service_config(File, Sudo): - """ - Ensure xvfb service configuration file is present. - Using Sudo context manager because the expected mode is 700. - Not sure it's really necessary to have this script by 700; 755 - sounds sufficient. 
- """ - with Sudo(): - f = File("/etc/init.d/xvfb") - assert f.is_file - assert oct(f.mode) == "0700" - assert f.user == "root" - assert f.group == "root" - # Let's hardcode the entire init script and check for exact match. - # The pytest output will display a diff if anything is missing. - xvfb_init_content = """ -# This is the /etc/init.d/xvfb script. We use it to launch xvfb at boot in the -# development environment so we can easily run the functional tests. - -XVFB=/usr/bin/Xvfb -XVFBARGS=":1 -screen 0 1024x768x24 -ac +extension GLX +render -noreset" -PIDFILE=/var/run/xvfb.pid -case "$1" in - start) - echo -n "Starting virtual X frame buffer: Xvfb" - start-stop-daemon --start --quiet --pidfile $PIDFILE --make-pidfile --background --exec $XVFB -- $XVFBARGS - echo "." - ;; - stop) - echo -n "Stopping virtual X frame buffer: Xvfb" - start-stop-daemon --stop --quiet --pidfile $PIDFILE - echo "." - ;; - restart) - $0 stop - $0 start - ;; - *) - echo "Usage: /etc/init.d/xvfb {start|stop|restart}" - exit 1 -esac - -exit 0 -""".lstrip().rstrip() # noqa - with Sudo(): - assert f.contains('^XVFB=/usr/bin/Xvfb$') - assert f.contains('^XVFBARGS=":1 -screen 0 1024x768x24 ' - '-ac +extension GLX +render -noreset"$') - assert f.content.rstrip() == xvfb_init_content - - -def test_xvfb_service_enabled(Command, Sudo): - """ - Ensure xvfb is configured to start on boot via update-rc.d. - The `-n` option to update-rc.d is dry-run. - - Using Sudo context manager because the service file is mode 700. - Not sure it's really necessary to have this script by 700; 755 - sounds sufficient. - """ - with Sudo(): - c = Command('update-rc.d -n xvfb defaults') - assert c.rc == 0 - wanted_text = 'System start/stop links for /etc/init.d/xvfb already exist.' - assert wanted_text in c.stdout - - -def test_xvfb_display_config(File): - """ - Ensure DISPLAY environment variable is set on boot, for running - headless tests via Xvfb. - """ - f = File('/etc/profile.d/xvfb_display.sh') - assert f.is_file - assert oct(f.mode) == "0444" - assert f.user == "root" - assert f.group == "root" - assert f.contains("export DISPLAY=:1\n") - - -def test_xvfb_service_running(Process, Sudo): - """ - Ensure that xvfb service is running. - - We can't use the Service module because it expects a "status" - subcommand for the init script, and our custom version doesn't have - one. So let's make sure the process is running. - """ - # Sudo isn't necessary to read out of /proc on development, but is - # required when running under Grsecurity, which app-staging does. - # So let's escalate privileges to ensure we can determine service state. - with Sudo(): - p = Process.get(user="root", comm="Xvfb") - wanted_args = str('/usr/bin/Xvfb :1 -screen 0 1024x768x24 ' - '-ac +extension GLX +render -noreset') - assert p.args == wanted_args - # We only expect a single process, no children. - workers = Process.filter(ppid=p.pid) - assert len(workers) == 0 diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py deleted file mode 100644 --- a/testinfra/mon/test_ossec.py +++ /dev/null @@ -1,183 +0,0 @@ -import os -import pytest - - -securedrop_test_vars = pytest.securedrop_test_vars - - [email protected]('package', [ - 'mailutils', - 'ossec-server', - 'postfix', - 'procmail', - 'securedrop-ossec-server', -]) -def test_ossec_package(Package, package): - """ - Ensure required packages for OSSEC are installed. - Includes mail utilities and the FPF-maintained metapackage. 
- """ - assert Package(package).is_installed - - -def test_ossec_connectivity(Command, Sudo): - """ - Ensure ossec-server machine has active connection to the ossec-agent. - The ossec service will report all available agents, and we can inspect - that list to make sure it's the host we expect. - """ - desired_output = "{}-{} is available.".format( - securedrop_test_vars.app_hostname, - os.environ.get('APP_IP', securedrop_test_vars.app_ip)) - with Sudo(): - c = Command.check_output("/var/ossec/bin/list_agents -a") - assert c == desired_output - - -def test_ossec_gnupg_homedir(File, Sudo): - """ ensure ossec gpg homedir exists """ - with Sudo(): - f = File("/var/ossec/.gnupg") - assert f.is_directory - assert f.user == "ossec" - assert oct(f.mode) == "0700" - - -# Permissions don't match between Ansible and OSSEC deb packages postinst. [email protected] -def test_ossec_gnupg(File, Sudo): - """ - Ensures the test Admin GPG public key is present as file. - Does not check that it's added to the keyring for the ossec user; - that's handled by a separate test. - """ - with Sudo(): - f = File("/var/ossec/test_admin_key.pub") - assert f.is_file - assert oct(f.mode) == "0644" - - -def test_ossec_pubkey_in_keyring(Command, Sudo): - """ - Ensure the test Admin GPG public key exists in the keyring - within the ossec home directory. - """ - ossec_gpg_pubkey_info = """pub 4096R/EDDDC102 2014-10-15 -uid Test/Development (DO NOT USE IN PRODUCTION) (Admin's OSSEC Alert GPG key) <[email protected]> -sub 4096R/97D2EB39 2014-10-15""" # noqa - with Sudo("ossec"): - c = Command.check_output("gpg --homedir /var/ossec/.gnupg " - "--list-keys EDDDC102") - assert c == ossec_gpg_pubkey_info - - -# Permissions don't match between Ansible and OSSEC deb packages postinst. [email protected] [email protected]('keyfile', [ - '/var/ossec/etc/sslmanager.key', - '/var/ossec/etc/sslmanager.cert', -]) -def test_ossec_keyfiles(File, Sudo, keyfile): - """ - Ensure that the OSSEC transport key pair exists. These keys are used - to protect the connection between the ossec-server and ossec-agent. - - All this check does in confirm they're present, it doesn't perform any - matching checks to validate the configuration. - """ - with Sudo(): - f = File(keyfile) - assert f.is_file - # The postinst scripts in the OSSEC deb packages set 440 on the - # keyfiles; the Ansible config should be updated to do the same. - assert oct(f.mode) == "0440" - assert f.user == "root" - assert f.group == "ossec" - - [email protected]('setting', [ - 'VERBOSE=yes', - 'MAILDIR=/var/mail/', - 'DEFAULT=$MAILDIR', - 'LOGFILE=/var/log/procmail.log', - 'SUBJECT=`formail -xSubject:`', - ':0 c', - '*^To:.*root.*', - '|/var/ossec/send_encrypted_alarm.sh', -]) -def test_procmail_settings(File, Sudo, setting): - """ - Ensure procmail settings are correct. These config lines determine - how the OSSEC email alerts are encrypted and then passed off for sending. - """ - # Sudo is required to traverse the /var/ossec directory. - with Sudo(): - f = File("/var/ossec/.procmailrc") - assert f.contains('^{}$'.format(setting)) - - -# Permissions don't match between Ansible and OSSEC deb packages postinst. [email protected] -def test_procmail_attrs(File, Sudo): - """ - Ensure procmail file attributes are specified correctly. - """ - with Sudo(): - f = File("/var/ossec/.procmailrc") - assert f.is_file - assert f.user == "ossec" - assert oct(f.mode) == "0440" - - -# Permissions don't match between Ansible and OSSEC deb packages postinst. 
[email protected] -def test_procmail_log(File, Sudo): - """ - Ensure procmail log file exist with proper ownership. - Only the ossec user should have read/write permissions. - """ - with Sudo(): - f = File("/var/log/procmail.log") - assert f.is_file - assert f.user == "ossec" - assert f.group == "root" - assert oct(f.mode) == "0660" - - -def test_ossec_authd(Command, Sudo): - """ Ensure that authd is not running """ - with Sudo(): - c = Command("pgrep ossec-authd") - assert c.stdout == "" - assert c.rc != 0 - - -def test_hosts_files(File, SystemInfo): - """ Ensure host files mapping are in place """ - f = File('/etc/hosts') - - app_ip = os.environ.get('APP_IP', securedrop_test_vars.app_ip) - app_host = securedrop_test_vars.app_hostname - - assert f.contains('^127.0.0.1.*localhost') - assert f.contains('^{}\s*{}$'.format(app_ip, app_host)) - - -def test_ossec_log_contains_no_malformed_events(File, Sudo): - """ - Ensure the OSSEC log reports no errors for incorrectly formatted - messages. These events indicate that the OSSEC server failed to decrypt - the event sent by the OSSEC agent, which implies a misconfiguration, - likely the IPv4 address or keypair differing from what's declared. - - Documentation regarding this error message can be found at: - http://ossec-docs.readthedocs.io/en/latest/faq/unexpected.html#id4 - """ - with Sudo(): - f = File("/var/ossec/logs/ossec.log") - assert not f.contains("ERROR: Incorrectly formated message from") - - -def test_regression_hosts(Command): - """ Regression test to check for duplicate entries. """ - assert Command.check_output("uniq --repeated /etc/hosts") == "" diff --git a/testinfra/mon/test_postfix.py b/testinfra/mon/test_postfix.py deleted file mode 100644 --- a/testinfra/mon/test_postfix.py +++ /dev/null @@ -1,96 +0,0 @@ -import re -import pytest - - -securedrop_test_vars = pytest.securedrop_test_vars - - [email protected]('header', [ - '/^X-Originating-IP:/ IGNORE', - '/^X-Mailer:/ IGNORE', - '/^Mime-Version:/ IGNORE', - '/^User-Agent:/ IGNORE', - '/^Received:/ IGNORE', -]) -def test_postfix_headers(File, header): - """ - Ensure postfix header filters are set correctly. Common mail headers - are stripped by default to avoid leaking metadata about the instance. - Message body is always encrypted prior to sending. 
- """ - f = File("/etc/postfix/header_checks") - assert f.is_file - assert oct(f.mode) == "0644" - regex = '^{}$'.format(re.escape(header)) - assert re.search(regex, f.content, re.M) - - [email protected]('setting', [ - 'relayhost = [smtp.gmail.com]:587', - 'smtp_sasl_auth_enable = yes', - 'smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd', - 'smtp_sasl_security_options = noanonymous', - 'smtp_use_tls = yes', - 'smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache', - 'smtp_tls_security_level = secure', - 'smtp_tls_CApath = /etc/ssl/certs', - 'smtp_tls_ciphers = high', - 'smtp_tls_protocols = TLSv1.2 TLSv1.1 TLSv1 !SSLv3 !SSLv2', - 'myhostname = ossec.server', - 'myorigin = $myhostname', - 'smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)', - 'biff = no', - 'append_dot_mydomain = no', - 'readme_directory = no', - 'smtp_header_checks = regexp:/etc/postfix/header_checks', - 'mailbox_command = /usr/bin/procmail', - 'inet_interfaces = loopback-only', - 'alias_maps = hash:/etc/aliases', - 'alias_database = hash:/etc/aliases', - 'mydestination = $myhostname, localhost.localdomain , localhost', - 'mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128', - 'mailbox_size_limit = 0', - 'recipient_delimiter = +', -]) -def test_postfix_settings(File, setting): - """ - Check all postfix configuration lines. There are technically multiple - configuration paths regarding the TLS settings, particularly the - fingerprint verification logic, but only the base default config is tested - currently. - """ - f = File("/etc/postfix/main.cf") - assert f.is_file - assert f.user == 'root' - assert oct(f.mode) == "0644" - regex = '^{}$'.format(re.escape(setting)) - assert re.search(regex, f.content, re.M) - - -def test_postfix_generic_maps(File): - """ - Regression test to check that generic Postfix maps are not configured - by default. As of #1565 Admins can opt-in to overriding the FROM address - used for sending OSSEC alerts, but by default we're preserving the old - `[email protected]` behavior, to avoid breaking email for previously - existing instances. - """ - assert not File("/etc/postfix/generic").exists - assert not File("/etc/postfix/main.cf").contains("^smtp_generic_maps") - - -def test_postfix_service(Service, Socket, Sudo): - """ - Check Postfix service. Postfix is used to deliver OSSEC alerts via - encrypted email. On staging hosts, Postfix is disabled, due to lack - of SASL authentication credentials, but on prod hosts it should run. - """ - # Elevated privileges are required to read Postfix service info, - # specifically `/var/spool/postfix/pid/master.pid`. - with Sudo(): - postfix = Service("postfix") - assert postfix.is_running == securedrop_test_vars.postfix_enabled - assert postfix.is_enabled == securedrop_test_vars.postfix_enabled - - socket = Socket("tcp://127.0.0.1:25") - assert socket.is_listening == securedrop_test_vars.postfix_enabled diff --git a/testinfra/test.py b/testinfra/test.py deleted file mode 100755 --- a/testinfra/test.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python -""" -Wrapper script for running Testinfra against SecureDrop VMs. -Accepts a single argument: the hostname to run the tests against. -Script will handle building the list of tests to run, based on hostname. -""" -import os -import subprocess -import sys -import tempfile - -# By default let's assume we're testing against the development VM. 
-try: - target_host = sys.argv[1] -except IndexError: - target_host = "development" - -# Set env var so that `testinfra/conftest.py` can read in a YAML vars file -# specific to the host being tested. -os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = target_host - - -def get_target_roles(target_host): - """ - Assemble list of role tests to run. Hard-coded per host. - """ - target_roles = {"development": ['testinfra/app-code', - 'testinfra/development'], - "app-staging": ['testinfra/app', - 'testinfra/app-code', - 'testinfra/common', - 'testinfra/development/test_xvfb.py'], - "mon-staging": ['testinfra/mon', - 'testinfra/common'], - "mon-prod": ['testinfra/mon']} - - try: - return target_roles[target_host] - except KeyError: - print("Unknown host '{}'! Exiting.".format(target_host)) - sys.exit(1) - - -def run_testinfra(target_host, verbose=True): - """ - Handler for executing testinfra against `target_host`. - Queries list of roles via helper def `get_target_roles`. - """ - conn_type = "ssh" - target_roles = get_target_roles(target_host) - if verbose: - # Print informative output prior to test run. - print("Running Testinfra suite against '{}'...".format(target_host)) - print("Target roles:") - for role in target_roles: - print(" - {}".format(role)) - - # Prod hosts host have SSH access over Tor. Let's use the SSH backend - # for Testinfra, rather than Ansible. When we write a dynamic inventory - # script for Ansible SSH-over-Tor, we can use the Ansible backend - # everywhere. - if target_host.endswith("-prod"): - os.environ['SECUREDROP_SSH_OVER_TOR'] = '1' - # Dump SSH config to tempfile so it can be passed as arg to testinfra. - ssh_config_output = subprocess.check_output(["vagrant", "ssh-config", - target_host]) - # Create temporary file to store ssh-config. Not deleting it - # automatically because there's no sensitive info (HidServAuth is - # required to connect), and we'll need it outside of the - # context-manager block that writes to it. - ssh_config_tmpfile = tempfile.NamedTemporaryFile(delete=False) - with ssh_config_tmpfile.file as f: - f.write(ssh_config_output) - ssh_config_path = ssh_config_tmpfile.name - testinfra_command_template = """ -testinfra \ - -vv \ - -n auto \ - --connection ssh \ - --ssh-config \ - {ssh_config_path}\ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - elif os.environ.get("FPF_CI", 'false') == 'true': - if os.environ.get("CI_SD_ENV", "development") == "development": - os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = "travis" - ssh_config_path = "" - testinfra_command_template = "testinfra -vv {target_roles}" - else: - ssh_config_path = os.environ["CI_SSH_CONFIG"] - testinfra_command_template = """ -testinfra \ - -vv \ - -n 8 \ - --connection {connection_type} \ - --ssh-config \ - {ssh_config_path}\ - --junit-xml=./{target_host}-results.xml\ - --junit-prefix={target_host}\ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - else: - ssh_config_path = "" - testinfra_command_template = """ -testinfra \ - -vv \ - -n auto \ - --connection ansible \ - --ansible-inventory \ - .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory \ - --hosts {target_host} \ - {target_roles} -""".lstrip().rstrip() - - testinfra_command = testinfra_command_template.format( - target_host=target_host, - ssh_config_path=ssh_config_path, - connection_type=conn_type, - target_roles=" ".join(target_roles), - ).split() - - # Execute config tests. 
- subprocess.check_call(testinfra_command) - - -if __name__ == "__main__": - run_testinfra(target_host)
Python3 admin virtualenv is broken if improperly set up (doesn't contain pip3) ## Description Initially discovered while reviewing https://github.com/freedomofpress/securedrop/pull/4927#issuecomment-543763957: if the admin virtualenv is improperly set up, one must manually remove `admin/.venv3` before the virtualenv can be rebuilt. ## Steps to Reproduce This is very hard to reproduce, but you can trigger it by pressing ctrl+c while the virtualenv is being set up during an initial `./securedrop-admin setup` run. Subsequent `./securedrop-admin setup` invocations will no longer work. ## Expected Behavior `./securedrop-admin setup` should not fail. ## Actual Behavior `./securedrop-admin setup` fails with the following output: ``` amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin setup INFO: Virtualenv already exists, not creating INFO: Checking Python dependencies for securedrop-admin ERROR: Failed to install pip dependencies. Check network connection and try again. ```
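The failure mode above suggests one possible recovery path: treat a venv directory that exists but lacks `bin/pip3` as incomplete and rebuild it, rather than assuming "directory exists" means "venv is usable". The following is a minimal sketch of that idea, not the project's actual implementation; the `admin/.venv3` path comes from the report, and the helper names are illustrative assumptions.

```
import os
import shutil
import subprocess

VENV_DIR = "admin/.venv3"  # path named in the report; illustrative only


def venv_is_complete(venv_dir=VENV_DIR):
    # A run interrupted with ctrl+c can leave the directory present
    # but without pip3, which the dependency-install step relies on.
    return os.path.exists(os.path.join(venv_dir, "bin", "pip3"))


def ensure_venv(venv_dir=VENV_DIR):
    # Treat "exists but incomplete" the same as "does not exist".
    if os.path.isdir(venv_dir) and not venv_is_complete(venv_dir):
        shutil.rmtree(venv_dir)  # discard the half-built venv
    if not os.path.isdir(venv_dir):
        subprocess.check_call(["python3", "-m", "venv", venv_dir])
```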
2020-04-21T20:43:26Z
[]
[]
freedomofpress/securedrop
5,257
freedomofpress__securedrop-5257
[ "5233" ]
1346be8c5617091dcff4757a389db6e5ab807c20
diff --git a/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py b/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py @@ -0,0 +1,32 @@ +"""add Source.deleted_at + +Revision ID: 35513370ba0d +Revises: 523fff3f969c +Create Date: 2020-05-06 22:28:01.214359 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '35513370ba0d' +down_revision = '523fff3f969c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('sources', schema=None) as batch_op: + batch_op.add_column(sa.Column('deleted_at', sa.DateTime(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('sources', schema=None) as batch_op: + batch_op.drop_column('deleted_at') + + # ### end Alembic commands ### diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -4,6 +4,7 @@ import pretty_bad_protocol as gnupg import os import io +import re import scrypt from random import SystemRandom @@ -76,6 +77,8 @@ class CryptoUtil: REDIS_FINGERPRINT_HASH = "sd/crypto-util/fingerprints" REDIS_KEY_HASH = "sd/crypto-util/keys" + SOURCE_KEY_UID_RE = re.compile(r"(Source|Autogenerated) Key <[-A-Za-z0-9+/=_]+>") + def __init__(self, scrypt_params, scrypt_id_pepper, @@ -213,23 +216,52 @@ def genkeypair(self, name, secret): key_length=self.__gpg_key_length, passphrase=secret, name_email=name, + name_real="Source Key", creation_date=self.DEFAULT_KEY_CREATION_DATE.isoformat(), expire_date=self.DEFAULT_KEY_EXPIRATION_DATE )) return genkey_obj + def find_source_key(self, fingerprint: str) -> typing.Optional[typing.Dict]: + """ + Searches the GPG keyring for a source key. + + A source key has the given fingerprint and is labeled either + "Source Key" or "Autogenerated Key". + + Returns the key or None. + """ + keys = self.gpg.list_keys() + for key in keys: + if fingerprint != key["fingerprint"]: + continue + + for uid in key["uids"]: + if self.SOURCE_KEY_UID_RE.match(uid): + return key + else: + return None + return None + def delete_reply_keypair(self, source_filesystem_id): - key = self.get_fingerprint(source_filesystem_id) + fingerprint = self.get_fingerprint(source_filesystem_id) + # If this source was never flagged for review, they won't have a reply # keypair - if not key: + if not fingerprint: return + # verify that the key with the given fingerprint belongs to a source + key = self.find_source_key(fingerprint) + if not key: + raise ValueError("source key not found") + # Always delete keys without invoking pinentry-mode = loopback # see: https://lists.gnupg.org/pipermail/gnupg-users/2016-May/055965.html temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) + # The subkeys keyword argument deletes both secret and public keys. 
- temp_gpg.delete_keys(key, secret=True, subkeys=True) + temp_gpg.delete_keys(fingerprint, secret=True, subkeys=True) self.redis.hdel(self.REDIS_KEY_HASH, self.get_fingerprint(source_filesystem_id)) self.redis.hdel(self.REDIS_FINGERPRINT_HASH, source_filesystem_id) diff --git a/securedrop/journalist.py b/securedrop/journalist.py --- a/securedrop/journalist.py +++ b/securedrop/journalist.py @@ -15,7 +15,7 @@ def prime_keycache(): Preloads CryptoUtil.keycache. """ with app.app_context(): - for source in Source.query.filter_by(pending=False).all(): + for source in Source.query.filter_by(pending=False, deleted_at=None).all(): app.crypto_util.get_pubkey(source.filesystem_id) diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -135,7 +135,7 @@ def get_token(): @api.route('/sources', methods=['GET']) @token_required def get_all_sources(): - sources = Source.query.filter_by(pending=False).all() + sources = Source.query.filter_by(pending=False, deleted_at=None).all() return jsonify( {'sources': [source.to_json() for source in sources]}), 200 diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -41,7 +41,12 @@ def col(filesystem_id): def delete_single(filesystem_id): """deleting a single collection from its /col page""" source = get_source(filesystem_id) - delete_collection(filesystem_id) + try: + delete_collection(filesystem_id) + except ValueError as e: + current_app.logger.error("error deleting collection: %s", e) + abort(500) + flash(gettext("{source_name}'s collection deleted") .format(source_name=source.journalist_designation), "notification") diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py --- a/securedrop/journalist_app/forms.py +++ b/securedrop/journalist_app/forms.py @@ -3,7 +3,7 @@ from flask_babel import lazy_gettext as gettext from flask_wtf import FlaskForm from flask_wtf.file import FileField, FileAllowed, FileRequired -from wtforms import (TextAreaField, TextField, BooleanField, HiddenField, +from wtforms import (TextAreaField, StringField, BooleanField, HiddenField, ValidationError) from wtforms.validators import InputRequired, Optional @@ -38,16 +38,16 @@ def name_length_validation(form, field): class NewUserForm(FlaskForm): - username = TextField('username', validators=[ + username = StringField('username', validators=[ InputRequired(message=gettext('This field is required.')), minimum_length_validation ]) - first_name = TextField('first_name', validators=[name_length_validation, Optional()]) - last_name = TextField('last_name', validators=[name_length_validation, Optional()]) + first_name = StringField('first_name', validators=[name_length_validation, Optional()]) + last_name = StringField('last_name', validators=[name_length_validation, Optional()]) password = HiddenField('password') is_admin = BooleanField('is_admin') is_hotp = BooleanField('is_hotp') - otp_secret = TextField('otp_secret', validators=[ + otp_secret = StringField('otp_secret', validators=[ otp_secret_validation, Optional() ]) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -64,7 +64,7 @@ def index(): # Long SQLAlchemy statements look best when formatted according to # the Pocoo style guide, IMHO: # http://www.pocoo.org/internal/styleguide/ - 
sources = Source.query.filter_by(pending=False) \ + sources = Source.query.filter_by(pending=False, deleted_at=None) \ .filter(Source.last_updated.isnot(None)) \ .order_by(Source.last_updated.desc()) \ .all() @@ -171,7 +171,7 @@ def bulk(): @view.route('/download_unread/<filesystem_id>') def download_unread_filesystem_id(filesystem_id): id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id submissions = Submission.query.filter( Submission.source_id == id, Submission.downloaded == false()).all() diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- import binascii +import datetime +import os -from datetime import datetime from flask import (g, flash, current_app, abort, send_file, redirect, url_for, render_template, Markup, sessions, request) from flask_babel import gettext, ngettext @@ -53,11 +54,17 @@ def commit_account_changes(user): flash(gettext("Account updated."), "success") -def get_source(filesystem_id): - """Return a Source object, representing the database row, for the source - with the `filesystem_id`""" +def get_source(filesystem_id, include_deleted=False): + """ + Return the Source object with `filesystem_id` + + If `include_deleted` is False, only sources with a null `deleted_at` will + be returned. + """ source = None query = Source.query.filter(Source.filesystem_id == filesystem_id) + if not include_deleted: + query = query.filter_by(deleted_at=None) source = get_one_or_else(query, current_app.logger, abort) return source @@ -157,7 +164,7 @@ def download(zip_basename, submissions): zf = current_app.storage.get_bulk_archive(submissions, zip_directory=zip_basename) attachment_filename = "{}--{}.zip".format( - zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) + zip_basename, datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) # Mark the submissions that have been downloaded as such for submission in submissions: @@ -233,8 +240,11 @@ def col_delete(cols_selected): if len(cols_selected) < 1: flash(gettext("No collections selected for deletion."), "error") else: - for filesystem_id in cols_selected: - delete_collection(filesystem_id) + now = datetime.datetime.utcnow() + sources = Source.query.filter(Source.filesystem_id.in_(cols_selected)) + sources.update({Source.deleted_at: now}, synchronize_session="fetch") + db.session.commit() + num = len(cols_selected) flash(ngettext('{num} collection deleted', '{num} collections deleted', num).format(num=num), @@ -259,17 +269,36 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions path = current_app.storage.path(filesystem_id) - current_app.storage.move_to_shredder(path) + if os.path.exists(path): + current_app.storage.move_to_shredder(path) # Delete the source's reply keypair - current_app.crypto_util.delete_reply_keypair(filesystem_id) + try: + current_app.crypto_util.delete_reply_keypair(filesystem_id) + except ValueError as e: + current_app.logger.error("could not delete reply keypair: %s", e) + raise # Delete their entry in the db - source = get_source(filesystem_id) + source = get_source(filesystem_id, include_deleted=True) db.session.delete(source) db.session.commit() +def purge_deleted_sources(): + """ + Deletes all Sources with a non-null `deleted_at` attribute. 
+ """ + sources = Source.query.filter(Source.deleted_at.isnot(None)).order_by(Source.deleted_at).all() + if sources: + current_app.logger.info("Purging deleted sources (%s)", len(sources)) + for source in sources: + try: + delete_collection(source.filesystem_id) + except Exception as e: + current_app.logger.error("Error deleting source %s: %s", source.uuid, e) + + def set_name(user, first_name, last_name): try: user.set_name(first_name, last_name) @@ -312,7 +341,7 @@ def col_download_unread(cols_selected): submissions = [] for filesystem_id in cols_selected: id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id submissions += Submission.query.filter( Submission.downloaded == false(), Submission.source_id == id).all() @@ -328,7 +357,7 @@ def col_download_all(cols_selected): submissions = [] for filesystem_id in cols_selected: id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id submissions += Submission.query.filter( Submission.source_id == id).all() return download("all", submissions) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -73,6 +73,9 @@ class Source(db.Model): # keep track of how many interactions have happened, for filenames interaction_count = Column(Integer, default=0, nullable=False) + # when deletion of the source was requested + deleted_at = Column(DateTime) + # Don't create or bother checking excessively long codenames to prevent DoS NUM_WORDS = 7 MAX_CODENAME_LEN = 128 diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -165,6 +165,7 @@ def setup_g(): try: g.source = Source.query \ .filter(Source.filesystem_id == g.filesystem_id) \ + .filter_by(deleted_at=None) \ .one() except NoResultFound as e: app.logger.error(
diff --git a/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py b/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py @@ -0,0 +1,48 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_source_deleter_service(host): + """ + Verify configuration of securedrop_source_deleter systemd service. + """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_source_deleter.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop Source deleter", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/source_deleter --interval 10".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_source_deleter") + assert s.is_enabled + assert s.is_running diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -119,7 +119,7 @@ def config(tmpdir): def alembic_config(config): base_dir = path.join(path.dirname(__file__), '..') migrations_dir = path.join(base_dir, 'alembic') - ini = configparser.SafeConfigParser() + ini = configparser.ConfigParser() ini.read(path.join(base_dir, 'alembic.ini')) ini.set('alembic', 'script_location', path.join(migrations_dir)) diff --git a/securedrop/tests/migrations/migration_35513370ba0d.py b/securedrop/tests/migrations/migration_35513370ba0d.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_35513370ba0d.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- + +import random +from uuid import uuid4 + +from db import db +from journalist_app import create_app +import sqlalchemy +import pytest + +from .helpers import bool_or_none, random_bool, random_chars, random_datetime + + +class UpgradeTester: + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + self.add_source() + self.valid_source_id = 1 + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + "uuid": str(uuid4()), + "filesystem_id": filesystem_id, + "journalist_designation": random_chars(50), + "flagged": bool_or_none(), + "last_updated": random_datetime(nullable=True), + "pending": bool_or_none(), + "interaction_count": random.randint(0, 1000), + } + sql = """ + INSERT INTO sources ( + uuid, filesystem_id, journalist_designation, flagged, last_updated, + pending, interaction_count + ) VALUES ( + :uuid, :filesystem_id, :journalist_designation, :flagged, :last_updated, + :pending, :interaction_count + ) + 
""" + + db.engine.execute(sqlalchemy.text(sql), **params) + + def check_upgrade(self): + """ + Check the new `deleted_at` column + + Querying `deleted_at` shouldn't cause an error, and no source + should already have it set. + """ + with self.app.app_context(): + sources = db.engine.execute( + sqlalchemy.text("SELECT * FROM sources WHERE deleted_at IS NOT NULL") + ).fetchall() + assert len(sources) == 0 + + +class DowngradeTester: + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + pass + + def check_downgrade(self): + """ + After downgrade, using `deleted_at` in a query should raise an exception + """ + with self.app.app_context(): + with pytest.raises(sqlalchemy.exc.OperationalError): + sources = db.engine.execute( + sqlalchemy.text( + "SELECT * FROM sources WHERE deleted_at IS NOT NULL" + ) + ).fetchall() + assert len(sources) == 0 diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -295,6 +295,17 @@ def test_delete_reply_keypair_no_key(source_app): source_app.crypto_util.delete_reply_keypair('Reality Winner') +def test_delete_reply_keypair_non_source(source_app): + """ + Checks that a non-source key is not deleted by delete_reply_keypair. + """ + name = "SecureDrop Test/Development (DO NOT USE IN PRODUCTION)" + with pytest.raises(ValueError) as excinfo: + source_app.crypto_util.delete_reply_keypair(name) + assert "source key not found" in str(excinfo.value) + assert source_app.crypto_util.get_fingerprint(name) + + def test_get_fingerprint(source_app, test_source): assert (source_app.crypto_util.get_fingerprint(test_source['filesystem_id']) is not None) diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -552,6 +552,9 @@ def test_delete_collections(mocker, journalist_app, source_app, test_journo): assert "{} collections deleted".format(num_sources) in text assert async_genkey.called + # simulate the source_deleter's work + journalist_app_module.utils.purge_deleted_sources() + # Make sure the collections are deleted from the filesystem def assertion(): assert not ( diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1330,7 +1330,7 @@ def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin): test_admin['otp_secret']) # Create 1px * 1px 'white' PNG file from its base64 string form = journalist_app_module.forms.LogoForm( - logo=(BytesIO(base64.decodestring + logo=(BytesIO(base64.decodebytes (b"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQ" b"VR42mP8/x8AAwMCAO+ip1sAAAAASUVORK5CYII=")), 'test.png') ) @@ -2067,6 +2067,8 @@ def test_col_process_successfully_deletes_multiple_sources(journalist_app, utils.db_helper.submit(source_1, 1) source_2, _ = utils.db_helper.init_source() utils.db_helper.submit(source_2, 1) + source_3, _ = utils.db_helper.init_source() + utils.db_helper.submit(source_3, 1) with journalist_app.test_client() as app: _login_user(app, test_journo['username'], test_journo['password'], @@ -2081,9 +2083,13 @@ def test_col_process_successfully_deletes_multiple_sources(journalist_app, assert resp.status_code == 200 - # Verify there are no remaining sources + # simulate the source_deleter's work + 
journalist_app_module.utils.purge_deleted_sources() + + # Verify that all of the specified sources were deleted, but no others remaining_sources = Source.query.all() - assert not remaining_sources + assert len(remaining_sources) == 1 + assert remaining_sources[0].uuid == source_3.uuid def test_col_process_successfully_stars_sources(journalist_app, diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -14,6 +14,7 @@ import version from db import db +from journalist_app.utils import delete_collection from models import InstanceConfig, Source, Reply from source_app import main as source_app_main from source_app import api as source_app_api @@ -653,11 +654,7 @@ def test_source_is_deleted_while_logged_in(source_app): # Now the journalist deletes the source filesystem_id = source_app.crypto_util.hash_codename(codename) - source_app.crypto_util.delete_reply_keypair(filesystem_id) - source = Source.query.filter_by( - filesystem_id=filesystem_id).one() - db.session.delete(source) - db.session.commit() + delete_collection(filesystem_id) # Source attempts to continue to navigate resp = app.post(url_for('main.lookup'), follow_redirects=True) @@ -668,8 +665,8 @@ def test_source_is_deleted_while_logged_in(source_app): assert 'codename' not in session logger.assert_called_once_with( - "Found no Sources when one was expected: " - "No row was found for one()") + "Found no Sources when one was expected: No row was found for one()" + ) def test_login_with_invalid_codename(source_app):
Deleting large numbers of sources can fail such that it is thereafter impossible to delete the sources ## Description In the journalist interface, if you select a large number of sources and delete them, the operation can take longer than the Apache timeout. Some sources' store directories will have been moved to the shredder before the failure. Thereafter, if you try to delete them, a `ValueError` will be thrown at line 246 of `store.py`, in `move_to_shredder`. The unhandled exception prevents the deletion of the source record, and this is how we get the zombie apocalypse. ## Steps to Reproduce In an environment using Apache (staging, prod VMs, QA hardware): - Add 500 sources with `qa_loader.py --source-count 500`. - Log in as a journalist. - Select all sources and click delete. ## Expected Behavior That the sources would be deleted without error. ## Actual Behavior You get a gateway timeout. Navigating back to the source list and trying again results in an internal server error, with a stacktrace in `/var/log/apache2/journalist-error.log`. ## Comments The fix might be as simple as checking for the existence of the source's store directory in `journalist_app.utils.delete_collection` and only calling `move_to_shredder` if it still exists, as sketched below. While there, the key pair deletion should be checked as well, so that if it's already gone, the source database record is still deleted.
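The guard suggested in the Comments is what this record's patch applies to `journalist_app.utils.delete_collection`: the store directory is only moved to the shredder if it still exists, so a retry after a timed-out request no longer trips the `ValueError`. A condensed view of that shape (Flask's `current_app` and the `storage` attribute come from the SecureDrop codebase):

```
import os
from flask import current_app


def delete_collection(filesystem_id):
    # An earlier, timed-out request may have already shredded the store
    # directory; skip the move instead of raising ValueError on retry.
    path = current_app.storage.path(filesystem_id)
    if os.path.exists(path):
        current_app.storage.move_to_shredder(path)
    # ... key pair and database-record deletion follow, each tolerant
    # of work the interrupted request already completed.
```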
Since it's not a regression, agreement today was to undertake a timeboxed (~2 hour) attempt to resolve for 1.3.0; if it ends up being more complex, will likely bump to 1.4.0. To no one's surprise, it turned out to be more complex. When the original deletion request times out, the mod_wsgi process still sees it through to completion, because it doesn't care that Apache has given up. Any workaround that presses on past nonexistent source store directories or keypairs can still fail in the final steps of `journalist_app.utils.delete_collection`, when the already-deleted source is queried. A journalist could keep backing up to the source list, refreshing, selecting all and deleting, and it might look like their efforts are making headway as the list shrinks, but in fact they'll just be tripping over the failures while the initial request is still churning through the deletions. We could lengthen the Apache timeout, or introduce a [request timeout](https://modwsgi.readthedocs.io/en/develop/configuration-directives/WSGIDaemonProcess.html) in the mod_wsgi configuration, but neither is a sure fix, and either could introduce other problems. The right thing to do is ensure we generate a response to this request in a reasonable timeframe. To that end, I'm going to look at adding a `deleted` flag to `Source`. Updating that should be quick. Another background worker process will periodically scan for deleted sources and do the work that is currently done in `delete_collection`. For now, we'll just omit deleted sources from the journalist interface. For the sake of clarity, per chat w/ Jen, we'll keep this on the 5/6-5/20 sprint for John to continue investigating, but we're still keeping it on the 1.4.0 (not 1.3.0) milestone, so it is not subject to the release/QA timeline pressure.
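The `deleted` flag described here landed in the patch above as a nullable `deleted_at` timestamp on `Source` (see the `35513370ba0d` migration and the `journalist_app/utils.py` changes). Condensed to its two halves, the approach is: the request handler only stamps the rows and commits, while a background worker does the slow filesystem and keyring work outside the request/response cycle. A sketch of both, with `Source`, `db`, and `delete_collection` supplied by the SecureDrop codebase:

```
import datetime


def col_delete(cols_selected):
    # Fast path inside the HTTP request: mark the sources and return,
    # so the response comes back well within the Apache timeout.
    now = datetime.datetime.utcnow()
    sources = Source.query.filter(Source.filesystem_id.in_(cols_selected))
    sources.update({Source.deleted_at: now}, synchronize_session="fetch")
    db.session.commit()


def purge_deleted_sources():
    # Run periodically by a background worker, not the web request.
    for source in Source.query.filter(Source.deleted_at.isnot(None)).all():
        delete_collection(source.filesystem_id)
```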
2020-05-13T20:49:31Z
[]
[]
freedomofpress/securedrop
5,262
freedomofpress__securedrop-5262
[ "5176", "5111" ]
8b8c8e1e5814bc28cfa6a539516ae61d9f892101
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -83,7 +83,7 @@ def validate(self, document): class ValidateIP(Validator): def validate(self, document): - if re.match(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', + if re.match(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', # lgtm [py/regex/unmatchable-dollar] # noqa: E501 document.text): return True raise ValidationError( @@ -244,150 +244,150 @@ def __init__(self, args): translations = " ".join(translations) self.desc = [ ['ssh_users', 'sd', str, - u'Username for SSH access to the servers', + 'Username for SSH access to the servers', SiteConfig.ValidateUser(), None, lambda config: True], ['daily_reboot_time', 4, int, - u'Daily reboot time of the server (24-hour clock)', + 'Daily reboot time of the server (24-hour clock)', SiteConfig.ValidateTime(), int, lambda config: True], ['app_ip', '10.20.2.2', str, - u'Local IPv4 address for the Application Server', + 'Local IPv4 address for the Application Server', SiteConfig.ValidateIP(), None, lambda config: True], ['monitor_ip', '10.20.3.2', str, - u'Local IPv4 address for the Monitor Server', + 'Local IPv4 address for the Monitor Server', SiteConfig.ValidateIP(), None, lambda config: True], ['app_hostname', 'app', str, - u'Hostname for Application Server', + 'Hostname for Application Server', SiteConfig.ValidateNotEmpty(), None, lambda config: True], ['monitor_hostname', 'mon', str, - u'Hostname for Monitor Server', + 'Hostname for Monitor Server', SiteConfig.ValidateNotEmpty(), None, lambda config: True], ['dns_server', '8.8.8.8', str, - u'DNS server specified during installation', + 'DNS server specified during installation', SiteConfig.ValidateNotEmpty(), None, lambda config: True], ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str, - u'Local filepath to public key for ' + 'Local filepath to public key for ' + 'SecureDrop Application GPG public key', SiteConfig.ValidatePath(self.args.ansible_path), None, lambda config: True], ['securedrop_app_https_on_source_interface', False, bool, - u'Whether HTTPS should be enabled on ' + 'Whether HTTPS should be enabled on ' + 'Source Interface (requires EV cert)', SiteConfig.ValidateYesNo(), lambda x: x.lower() == 'yes', lambda config: True], ['securedrop_app_https_certificate_cert_src', '', str, - u'Local filepath to HTTPS certificate', + 'Local filepath to HTTPS certificate', SiteConfig.ValidateOptionalPath(self.args.ansible_path), None, lambda config: config.get( 'securedrop_app_https_on_source_interface')], ['securedrop_app_https_certificate_key_src', '', str, - u'Local filepath to HTTPS certificate key', + 'Local filepath to HTTPS certificate key', SiteConfig.ValidateOptionalPath(self.args.ansible_path), None, lambda config: config.get( 'securedrop_app_https_on_source_interface')], ['securedrop_app_https_certificate_chain_src', '', str, - u'Local filepath to HTTPS certificate chain file', + 'Local filepath to HTTPS certificate chain file', SiteConfig.ValidateOptionalPath(self.args.ansible_path), None, lambda config: config.get( 'securedrop_app_https_on_source_interface')], ['securedrop_app_gpg_fingerprint', '', str, - u'Full fingerprint for the SecureDrop Application GPG Key', + 'Full fingerprint for the SecureDrop Application GPG Key', SiteConfig.ValidateFingerprint(), self.sanitize_fingerprint, lambda config: True], ['ossec_alert_gpg_public_key', 'ossec.pub', str, - u'Local filepath to OSSEC alerts GPG 
public key', + 'Local filepath to OSSEC alerts GPG public key', SiteConfig.ValidatePath(self.args.ansible_path), None, lambda config: True], ['ossec_gpg_fpr', '', str, - u'Full fingerprint for the OSSEC alerts GPG public key', + 'Full fingerprint for the OSSEC alerts GPG public key', SiteConfig.ValidateFingerprint(), self.sanitize_fingerprint, lambda config: True], ['ossec_alert_email', '', str, - u'Admin email address for receiving OSSEC alerts', + 'Admin email address for receiving OSSEC alerts', SiteConfig.ValidateOSSECEmail(), None, lambda config: True], ['journalist_alert_gpg_public_key', '', str, - u'Local filepath to journalist alerts GPG public key (optional)', + 'Local filepath to journalist alerts GPG public key (optional)', SiteConfig.ValidateOptionalPath(self.args.ansible_path), None, lambda config: True], ['journalist_gpg_fpr', '', str, - u'Full fingerprint for the journalist alerts ' - u'GPG public key (optional)', + 'Full fingerprint for the journalist alerts ' + + 'GPG public key (optional)', SiteConfig.ValidateOptionalFingerprint(), self.sanitize_fingerprint, lambda config: config.get('journalist_alert_gpg_public_key')], ['journalist_alert_email', '', str, - u'Email address for receiving journalist alerts (optional)', + 'Email address for receiving journalist alerts (optional)', SiteConfig.ValidateOptionalEmail(), None, lambda config: config.get('journalist_alert_gpg_public_key')], ['smtp_relay', "smtp.gmail.com", str, - u'SMTP relay for sending OSSEC alerts', + 'SMTP relay for sending OSSEC alerts', SiteConfig.ValidateNotEmpty(), None, lambda config: True], ['smtp_relay_port', 587, int, - u'SMTP port for sending OSSEC alerts', + 'SMTP port for sending OSSEC alerts', SiteConfig.ValidateInt(), int, lambda config: True], ['sasl_domain', "gmail.com", str, - u'SASL domain for sending OSSEC alerts', + 'SASL domain for sending OSSEC alerts', None, None, lambda config: True], ['sasl_username', '', str, - u'SASL username for sending OSSEC alerts', + 'SASL username for sending OSSEC alerts', SiteConfig.ValidateOSSECUsername(), None, lambda config: True], ['sasl_password', '', str, - u'SASL password for sending OSSEC alerts', + 'SASL password for sending OSSEC alerts', SiteConfig.ValidateOSSECPassword(), None, lambda config: True], ['enable_ssh_over_tor', True, bool, - u'Enable SSH over Tor (recommended, disables SSH over LAN). ' - u'If you respond no, SSH will be available over LAN only', + 'Enable SSH over Tor (recommended, disables SSH over LAN). 
' + + 'If you respond no, SSH will be available over LAN only', SiteConfig.ValidateYesNo(), lambda x: x.lower() == 'yes', lambda config: True], ['securedrop_supported_locales', [], list, - u'Space separated list of additional locales to support ' + 'Space separated list of additional locales to support ' '(' + translations + ')', SiteConfig.ValidateLocales(self.args.app_path), str.split, lambda config: True], ['v2_onion_services', self.check_for_v2_onion(), bool, - u'Do you want to enable v2 onion services (recommended only for SecureDrop instances installed before 1.0.0)?', # noqa: E501 + 'Do you want to enable v2 onion services (recommended only for SecureDrop instances installed before 1.0.0)?', # noqa: E501 SiteConfig.ValidateYesNo(), lambda x: x.lower() == 'yes', lambda config: True], ['v3_onion_services', self.check_for_v3_onion, bool, - u'Do you want to enable v3 onion services (recommended)?', + 'Do you want to enable v3 onion services (recommended)?', SiteConfig.ValidateYesNoForV3(self), lambda x: x.lower() == 'yes', lambda config: True], @@ -698,12 +698,25 @@ def restore_securedrop(args): # Would like readable output if there's a problem os.environ["ANSIBLE_STDOUT_CALLBACK"] = "debug" - ansible_cmd = [ + ansible_cmd_full_restore = [ 'ansible-playbook', os.path.join(args.ansible_path, 'securedrop-restore.yml'), '-e', "restore_file='{}'".format(restore_file_basename), ] + + ansible_cmd_skip_tor = [ + 'ansible-playbook', + os.path.join(args.ansible_path, 'securedrop-restore.yml'), + '-e', + "restore_file='{}' restore_skip_tor='True'".format(restore_file_basename), + ] + + if args.restore_skip_tor: + ansible_cmd = ansible_cmd_skip_tor + else: + ansible_cmd = ansible_cmd_full_restore + return subprocess.check_call(ansible_cmd, cwd=args.ansible_path) @@ -724,7 +737,7 @@ def run_tails_config(args): def check_for_updates_wrapper(args): - res, tag = check_for_updates(args) + check_for_updates(args) # Because the command worked properly exit with 0. return 0 @@ -935,6 +948,10 @@ class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter, help=restore_securedrop.__doc__) parse_restore.set_defaults(func=restore_securedrop) parse_restore.add_argument("restore_file") + parse_restore.add_argument("--preserve-tor-config", default=False, + action='store_true', + dest='restore_skip_tor', + help="Preserve the server's current Tor config") parse_update = subparsers.add_parser('update', help=update.__doc__) parse_update.set_defaults(func=update) diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '1.2.2' +version = '1.3.0' # The full version, including alpha/beta/rc tags. -release = '1.2.2' +release = '1.3.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py --- a/install_files/ansible-base/callback_plugins/ansible_version_check.py +++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py @@ -19,13 +19,18 @@ def print_red_bold(text): class CallbackModule(CallbackBase): def __init__(self): - # Can't use `on_X` because this isn't forwards compatible - # with Ansible 2.0+ - required_version = '2.7.13' # Keep synchronized with requirements files - if not ansible.__version__.startswith(required_version): + # The acceptable version range needs to be synchronized with + # requirements files. + viable_start = [2, 9, 7] + viable_end = [2, 10, 0] + ansible_version = [int(v) for v in ansible.__version__.split('.')] + if not (viable_start <= ansible_version < viable_end): print_red_bold( - "SecureDrop restriction: only Ansible {version}.*" - "is supported." - .format(version=required_version) + "SecureDrop restriction: Ansible version must be at least {viable_start} " + "and less than {viable_end}." + .format( + viable_start='.'.join(str(v) for v in viable_start), + viable_end='.'.join(str(v) for v in viable_end), + ) ) sys.exit(1) diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py --- a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py +++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py @@ -14,7 +14,7 @@ ossec_version: description: - version number of release to download - default: "3.0.0" + default: "3.6.0" required: no notes: - The OSSEC version to download is hardcoded to avoid surprises. @@ -23,15 +23,13 @@ ''' EXAMPLES = ''' - ossec_urls: - ossec_version: "3.0.0" + ossec_version: "3.6.0" ''' -import re # noqa: F401 - HAS_REQUESTS = True try: - import requests # noqa: F401 + import requests # lgtm [py/unused-import] # noqa: F401 except ImportError: HAS_REQUESTS = False @@ -70,7 +68,7 @@ def ossec_signature_filename(self): def main(): module = AnsibleModule( # noqa: F405 argument_spec=dict( - ossec_version=dict(default="3.0.0"), + ossec_version=dict(default="3.6.0"), ), supports_check_mode=False ) @@ -80,7 +78,7 @@ def main(): ossec_version = module.params['ossec_version'] try: ossec_config = OSSECURLs(ossec_version=ossec_version) - except: # noqa: E722 + except Exception: msg = ("Failed to find checksum information for OSSEC v{}." 
"Ensure you have the proper release specified, " "and check the download page to confirm: " diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py --- a/journalist_gui/journalist_gui/SecureDropUpdater.py +++ b/journalist_gui/journalist_gui/SecureDropUpdater.py @@ -7,6 +7,7 @@ import pexpect import socket import sys +import syslog as log from journalist_gui import updaterUI, strings, resources_rc # noqa @@ -35,9 +36,7 @@ def prevent_second_instance(app: QtWidgets.QApplication, name: str) -> None: # app.instance_binding.bind(IDENTIFIER) except OSError as e: if e.errno == ALREADY_BOUND_ERRNO: - err_dialog = QtWidgets.QMessageBox() - err_dialog.setText(name + strings.app_is_already_running) - err_dialog.exec() + log.syslog(log.LOG_NOTICE, name + strings.app_is_already_running) sys.exit() else: raise @@ -129,12 +128,13 @@ def run(self): tailsconfig_command = ("/home/amnesia/Persistent/" "securedrop/securedrop-admin " "tailsconfig") + self.failure_reason = "" try: child = pexpect.spawn(tailsconfig_command) child.expect('SUDO password:') self.output += child.before.decode('utf-8') child.sendline(self.sudo_password) - child.expect(pexpect.EOF) + child.expect(pexpect.EOF, timeout=120) self.output += child.before.decode('utf-8') child.close() @@ -142,13 +142,15 @@ def run(self): # failures in the Ansible output. if child.exitstatus: self.update_success = False - self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa + if "[sudo via ansible" in self.output: + self.failure_reason = strings.tailsconfig_failed_sudo_password + else: + self.failure_reason = strings.tailsconfig_failed_generic_reason else: self.update_success = True except pexpect.exceptions.TIMEOUT: self.update_success = False - self.failure_reason = strings.tailsconfig_failed_sudo_password - + self.failure_reason = strings.tailsconfig_failed_timeout except subprocess.CalledProcessError: self.update_success = False self.failure_reason = strings.tailsconfig_failed_generic_reason diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py --- a/journalist_gui/journalist_gui/strings.py +++ b/journalist_gui/journalist_gui/strings.py @@ -25,12 +25,15 @@ "Contact your SecureDrop administrator " "or [email protected] immediately.") tailsconfig_failed_sudo_password = ('Administrator password incorrect. ' - 'Exiting upgrade - ' - 'click Update Now to try again.') + 'Click Update Now to try again.') tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. " "Contact your administrator. " "If you are an administrator, contact " "[email protected].") +tailsconfig_failed_timeout = ("Tails workstation configuration took too long. " + "Contact your administrator. " + "If you are an administrator, contact " + "[email protected].") install_update_button = 'Update Now' install_later_button = 'Update Later' sudo_password_text = ("Enter the Tails Administrator password you " diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py --- a/securedrop/create-dev-data.py +++ b/securedrop/create-dev-data.py @@ -4,6 +4,7 @@ import datetime import os import argparse +from itertools import cycle from flask import current_app from sqlalchemy.exc import IntegrityError @@ -15,6 +16,18 @@ from db import db from models import Journalist, Reply, Source, Submission +submissions = cycle([ + 'This is a test submission without markup!', + 'This is a test submission with markup and characters such as \, \\, \', \" and ". 
' + # noqa: W605, E501 + '<strong>This text should not be bold</strong>!' +]) + +replies = cycle([ + 'This is a test reply without markup!', + 'This is a test reply with markup and characters such as \, \\, \', \" and ". ' + # noqa: W605, E501 + '<strong>This text should not be bold</strong>!' +]) + def main(staging=False): app = journalist_app.create_app(config) @@ -36,29 +49,51 @@ def main(staging=False): test_otp_secret, is_admin=False) + journalist_tobe_deleted = add_test_user("clarkkent", + test_password, + test_otp_secret, + is_admin=False, + first_name="Clark", + last_name="Kent") + # Add test sources and submissions num_sources = int(os.getenv('NUM_SOURCES', 2)) - for _ in range(num_sources): - create_source_and_submissions() + for i in range(1, num_sources + 1): + if i == 1: + # For the first source, the journalist who replied will be deleted + create_source_and_submissions( + i, num_sources, journalist_who_replied=journalist_tobe_deleted + ) + continue + create_source_and_submissions(i, num_sources) + # Now let us delete one journalist + db.session.delete(journalist_tobe_deleted) + db.session.commit() -def add_test_user(username, password, otp_secret, is_admin=False): +def add_test_user(username, password, otp_secret, is_admin=False, + first_name="", last_name=""): try: user = Journalist(username=username, password=password, - is_admin=is_admin) + is_admin=is_admin, + first_name=first_name, + last_name=last_name) user.otp_secret = otp_secret db.session.add(user) db.session.commit() print('Test user successfully added: ' 'username={}, password={}, otp_secret={}, is_admin={}' ''.format(username, password, otp_secret, is_admin)) + return user except IntegrityError: print("Test user already added") db.session.rollback() -def create_source_and_submissions(num_submissions=2, num_replies=2): +def create_source_and_submissions( + source_index, source_count, num_submissions=2, num_replies=2, journalist_who_replied=None +): # Store source in database codename = current_app.crypto_util.genrandomid() filesystem_id = current_app.crypto_util.hash_codename(codename) @@ -79,7 +114,7 @@ def create_source_and_submissions(num_submissions=2, num_replies=2): source.filesystem_id, source.interaction_count, source.journalist_filename, - 'test submission!' 
+ next(submissions) ) source.last_updated = datetime.datetime.utcnow() submission = Submission(source, fpath) @@ -91,20 +126,27 @@ def create_source_and_submissions(num_submissions=2, num_replies=2): fname = "{}-{}-reply.gpg".format(source.interaction_count, source.journalist_filename) current_app.crypto_util.encrypt( - 'this is a test reply!', - [current_app.crypto_util.getkey(source.filesystem_id), + next(replies), + [current_app.crypto_util.get_fingerprint(source.filesystem_id), config.JOURNALIST_KEY], current_app.storage.path(source.filesystem_id, fname)) - journalist = Journalist.query.first() + if not journalist_who_replied: + journalist = Journalist.query.first() + else: + journalist = journalist_who_replied reply = Reply(journalist, source, fname) db.session.add(reply) db.session.commit() - print("Test source (codename: '{}', journalist designation '{}') " - "added with {} submissions and {} replies".format( - codename, journalist_designation, num_submissions, num_replies)) + print( + "Test source {}/{} (codename: '{}', journalist designation '{}') " + "added with {} submissions and {} replies".format( + source_index, source_count, codename, journalist_designation, + num_submissions, num_replies + ) + ) if __name__ == "__main__": # pragma: no cover diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import collections from distutils.version import StrictVersion import pretty_bad_protocol as gnupg import os @@ -12,6 +11,7 @@ from datetime import date from flask import current_app from pretty_bad_protocol._util import _is_stream, _make_binary_stream +from redis import Redis import rm @@ -55,32 +55,6 @@ def monkey_patch_delete_handle_status(self, key, value): gnupg._parsers.DeleteResult._handle_status = monkey_patch_delete_handle_status -class FIFOCache(): - """ - We implemented this simple cache instead of using functools.lru_cache - (this uses a different cache replacement policy (FIFO), but either - FIFO or LRU works for our key fingerprint cache) - due to the inability to remove an item from its cache. - - See: https://bugs.python.org/issue28178 - """ - def __init__(self, maxsize: int): - self.maxsize = maxsize - self.cache = collections.OrderedDict() # type: collections.OrderedDict - - def get(self, item): - if item in self.cache: - return self.cache[item] - - def put(self, item, value): - self.cache[item] = value - if len(self.cache) > self.maxsize: - self.cache.popitem(last=False) - - def delete(self, item): - del self.cache[item] - - class CryptoException(Exception): pass @@ -99,8 +73,8 @@ class CryptoUtil: # to set an expiration date. 
DEFAULT_KEY_EXPIRATION_DATE = '0' - keycache_limit = 1000 - keycache = FIFOCache(keycache_limit) + REDIS_FINGERPRINT_HASH = "sd/crypto-util/fingerprints" + REDIS_KEY_HASH = "sd/crypto-util/keys" def __init__(self, scrypt_params, @@ -114,7 +88,7 @@ def __init__(self, self.__securedrop_root = securedrop_root self.__word_list = word_list - if os.environ.get('SECUREDROP_ENV') == 'test': + if os.environ.get('SECUREDROP_ENV') in ('dev', 'test'): # Optimize crypto to speed up tests (at the expense of security # DO NOT use these settings in production) self.__gpg_key_length = 1024 @@ -148,6 +122,8 @@ def __init__(self, with io.open(adjectives_file) as f: self.adjectives = f.read().splitlines() + self.redis = Redis(decode_responses=True) + # Make sure these pass before the app can run def do_runtime_tests(self): if self.scrypt_id_pepper == self.scrypt_gpg_pepper: @@ -243,7 +219,7 @@ def genkeypair(self, name, secret): return genkey_obj def delete_reply_keypair(self, source_filesystem_id): - key = self.getkey(source_filesystem_id) + key = self.get_fingerprint(source_filesystem_id) # If this source was never flagged for review, they won't have a reply # keypair if not key: @@ -254,29 +230,45 @@ def delete_reply_keypair(self, source_filesystem_id): temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) # The subkeys keyword argument deletes both secret and public keys. temp_gpg.delete_keys(key, secret=True, subkeys=True) - self.keycache.delete(source_filesystem_id) + self.redis.hdel(self.REDIS_KEY_HASH, self.get_fingerprint(source_filesystem_id)) + self.redis.hdel(self.REDIS_FINGERPRINT_HASH, source_filesystem_id) + + def get_fingerprint(self, name): + """ + Returns the fingerprint of the GPG key for the given name. - def getkey(self, name): - fingerprint = self.keycache.get(name) - if fingerprint: # cache hit + The supplied name is usually a source filesystem ID. + """ + fingerprint = self.redis.hget(self.REDIS_FINGERPRINT_HASH, name) + if fingerprint: return fingerprint - # cache miss for key in self.gpg.list_keys(): for uid in key['uids']: if name in uid: - self.keycache.put(name, key['fingerprint']) + self.redis.hset(self.REDIS_FINGERPRINT_HASH, name, key['fingerprint']) return key['fingerprint'] return None - def export_pubkey(self, name): - fingerprint = self.getkey(name) - if fingerprint: - return self.gpg.export_keys(fingerprint) - else: + def get_pubkey(self, name): + """ + Returns the GPG public key for the given name. + + The supplied name is usually a source filesystem ID. 
+ """ + fingerprint = self.get_fingerprint(name) + if not fingerprint: return None + key = self.redis.hget(self.REDIS_KEY_HASH, fingerprint) + if key: + return key + + key = self.gpg.export_keys(fingerprint) + self.redis.hset(self.REDIS_KEY_HASH, fingerprint, key) + return key + def encrypt(self, plaintext, fingerprints, output=None): # Verify the output path if output: diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py --- a/securedrop/i18n_tool.py +++ b/securedrop/i18n_tool.py @@ -369,10 +369,26 @@ def set_update_from_weblate_parser(self, subps): def set_list_locales_parser(self, subps): parser = subps.add_parser('list-locales', help='List supported locales') + parser.add_argument( + '--python', + action='store_true', + help=('Print the locales as a Python list suitable for config.py') + ) + parser.add_argument( + '--lines', + action='store_true', + help=('List one locale per line') + ) parser.set_defaults(func=self.list_locales) def list_locales(self, args): - print(sorted(list(self.SUPPORTED_LANGUAGES.keys()) + ['en_US'])) + if args.lines: + for l in sorted(list(self.SUPPORTED_LANGUAGES.keys()) + ['en_US']): + print(l) + elif args.python: + print(sorted(list(self.SUPPORTED_LANGUAGES.keys()) + ['en_US'])) + else: + print(" ".join(sorted(list(self.SUPPORTED_LANGUAGES.keys()) + ['en_US']))) def set_list_translators_parser(self, subps): parser = subps.add_parser('list-translators', diff --git a/securedrop/journalist.py b/securedrop/journalist.py --- a/securedrop/journalist.py +++ b/securedrop/journalist.py @@ -16,7 +16,7 @@ def prime_keycache(): """ with app.app_context(): for source in Source.query.filter_by(pending=False).all(): - app.crypto_util.getkey(source.filesystem_id) + app.crypto_util.get_pubkey(source.filesystem_id) prime_keycache() diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py --- a/securedrop/journalist_app/admin.py +++ b/securedrop/journalist_app/admin.py @@ -59,6 +59,7 @@ def update_submission_preferences(): form = SubmissionPreferencesForm() if form.validate_on_submit(): # The UI prompt ("prevent") is the opposite of the setting ("allow"): + flash(gettext("Preferences saved."), "submission-preferences-success") value = not bool(request.form.get('prevent_document_uploads')) InstanceConfig.set('allow_document_uploads', value) return redirect(url_for('admin.manage_config')) diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -33,7 +33,7 @@ def remove_star(filesystem_id): def col(filesystem_id): form = ReplyForm() source = get_source(filesystem_id) - source.has_key = current_app.crypto_util.getkey(filesystem_id) + source.has_key = current_app.crypto_util.get_fingerprint(filesystem_id) return render_template("col.html", filesystem_id=filesystem_id, source=source, form=form) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -108,7 +108,7 @@ def reply(): g.source.journalist_filename) current_app.crypto_util.encrypt( form.message.data, - [current_app.crypto_util.getkey(g.filesystem_id), + [current_app.crypto_util.get_fingerprint(g.filesystem_id), config.JOURNALIST_KEY], output=current_app.storage.path(g.filesystem_id, filename), ) diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py --- a/securedrop/management/submissions.py +++ b/securedrop/management/submissions.py @@ 
-144,7 +144,6 @@ def delete_disconnected_fs_submissions(args): time_elapsed = 0.0 rate = 1.0 filecount = len(disconnected_files) - eta = 1.0 eta_msg = "" for i, f in enumerate(disconnected_files, 1): remove = args.force diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -118,7 +118,7 @@ def collection(self): @property def fingerprint(self): - return current_app.crypto_util.getkey(self.filesystem_id) + return current_app.crypto_util.get_fingerprint(self.filesystem_id) @fingerprint.setter def fingerprint(self, value): @@ -131,7 +131,7 @@ def fingerprint(self): @property def public_key(self): # type: () -> str - return current_app.crypto_util.export_pubkey(self.filesystem_id) + return current_app.crypto_util.get_pubkey(self.filesystem_id) @public_key.setter def public_key(self, value): @@ -184,6 +184,8 @@ def to_json(self): class Submission(db.Model): + MAX_MESSAGE_LEN = 100000 + __tablename__ = 'submissions' id = Column(Integer, primary_key=True) uuid = Column(String(36), unique=True, nullable=False) @@ -278,6 +280,15 @@ def __repr__(self): def to_json(self): # type: () -> Dict[str, Union[str, int, bool]] + username = "deleted" + first_name = "" + last_name = "" + uuid = "deleted" + if self.journalist: + username = self.journalist.username + first_name = self.journalist.first_name + last_name = self.journalist.last_name + uuid = self.journalist.uuid json_submission = { 'source_url': url_for('api.single_source', source_uuid=self.source.uuid), @@ -286,10 +297,10 @@ def to_json(self): reply_uuid=self.uuid), 'filename': self.filename, 'size': self.size, - 'journalist_username': self.journalist.username, - 'journalist_first_name': self.journalist.first_name, - 'journalist_last_name': self.journalist.last_name, - 'journalist_uuid': self.journalist.uuid, + 'journalist_username': username, + 'journalist_first_name': first_name, + 'journalist_last_name': last_name, + 'journalist_uuid': uuid, 'uuid': self.uuid, 'is_deleted_by_source': self.deleted_by_source, } @@ -782,7 +793,7 @@ def get_current(cls): ''' try: - return cls.query.filter(cls.valid_until == None).one() # noqa: E711 + return cls.query.filter(cls.valid_until == None).one() # lgtm [py/test-equals-none] # noqa: E711, E501 except NoResultFound: current = cls() db.session.add(current) diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py --- a/securedrop/qa_loader.py +++ b/securedrop/qa_loader.py @@ -1,37 +1,36 @@ #!/opt/venvs/securedrop-app-code/bin/python # -*- coding: utf-8 -*- -import math import os import random import string import sys - from argparse import ArgumentParser from datetime import datetime -from flask import current_app +from itertools import cycle from os import path -from sqlalchemy import text + +from flask import current_app from crypto_util import DICEWARE_SAFE_CHARS from db import db from journalist_app import create_app -from models import (Journalist, Source, Submission, SourceStar, Reply, - JournalistLoginAttempt) +from models import Journalist, JournalistLoginAttempt, Reply, Source, SourceStar, Submission from sdconfig import config as sdconfig -random.seed('~(=^–^)') # mrow? + +random.seed("~(=^–^)") # mrow? 
def random_bool(): return bool(random.getrandbits(1)) -def random_chars(len, nullable, chars=string.printable): +def random_chars(len, nullable, chars=string.ascii_letters): if nullable and random_bool(): return None else: - return ''.join([random.choice(chars) for _ in range(len)]) + return "".join([random.choice(chars) for _ in range(len)]) def bool_or_none(): @@ -42,10 +41,11 @@ def random_datetime(nullable): if nullable and random_bool(): return None else: + now = datetime.now() return datetime( - year=random.randint(1, 9999), - month=random.randint(1, 12), - day=random.randint(1, 28), + year=random.randint(2013, now.year), + month=random.randint(1, now.month), + day=random.randint(1, now.day), hour=random.randint(0, 23), minute=random.randint(0, 59), second=random.randint(0, 59), @@ -56,43 +56,83 @@ def random_datetime(nullable): def positive_int(s): i = int(s) if i < 1: - raise ValueError('{} is not >= 1'.format(s)) + raise ValueError("{} is not >= 1".format(s)) return i -class QaLoader(object): +def fraction(s): + f = float(s) + if 0 <= f <= 1: + return f + raise ValueError("{} should be a float between 0 and 1".format(s)) + + +submissions = cycle( + [ + "This is a test submission without markup!", + 'This is a test submission with markup and characters such as \, \\, \', " and ". ' + + "<strong>This text should not be bold</strong>!", # noqa: W605, E501 + ] +) + + +replies = cycle( + [ + "This is a test reply without markup!", + 'This is a test reply with markup and characters such as \, \\, \', " and ". ' + + "<strong>This text should not be bold</strong>!", # noqa: W605, E501 + ] +) - JOURNALIST_COUNT = 10 - SOURCE_COUNT = 50 - def __init__(self, config, multiplier): +class QaLoader(object): + def __init__( + self, + config, + journalist_count=10, + source_count=50, + submissions_per_source=1, + replies_per_source=1, + source_star_fraction=0.1, + source_reply_fraction=0.5, + ): + """ + source_star_fraction and source_reply_fraction are simply the + fraction of sources starred or replied to. 
+ """ self.config = config self.app = create_app(config) - self.multiplier = multiplier + + self.journalist_count = journalist_count + self.source_count = source_count + self.submissions_per_source = submissions_per_source + self.replies_per_source = replies_per_source + self.source_star_fraction = source_star_fraction + self.source_reply_fraction = source_reply_fraction self.journalists = [] self.sources = [] - self.submissions = [] def new_journalist(self): # Make a diceware-like password - pw = ' '.join( - [random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) - for _ in range(7)]) - journalist = Journalist(username=random_chars(random.randint(3, 32), nullable=False), - password=pw, - is_admin=random_bool()) + pw = " ".join( + [random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS) for _ in range(7)] + ) + journalist = Journalist( + username=random_chars(random.randint(3, 32), nullable=False), + password=pw, + is_admin=random_bool(), + ) if random_bool(): # to add legacy passwords back in journalist.passphrase_hash = None - journalist.pw_salt = random_chars(32, nullable=False).encode('utf-8') - journalist.pw_hash = random_chars(64, nullable=False).encode('utf-8') + journalist.pw_salt = random_chars(32, nullable=False).encode("utf-8") + journalist.pw_hash = random_chars(64, nullable=False).encode("utf-8") journalist.is_admin = bool_or_none() journalist.is_totp = bool_or_none() - journalist.hotp_counter = (random.randint(-1000, 1000) - if random_bool() else None) + journalist.hotp_counter = random.randint(-1000, 1000) if random_bool() else None journalist.created_on = random_datetime(nullable=True) journalist.last_access = random_datetime(nullable=True) @@ -101,57 +141,36 @@ def new_journalist(self): self.journalists.append(journalist.id) def new_source(self): - fid_len = random.randint(4, 32) - designation_len = random.randint(4, 32) - source = Source(random_chars(fid_len, nullable=False, - chars=string.ascii_lowercase), - random_chars(designation_len, nullable=False)) - source.flagged = bool_or_none() - source.last_updated = random_datetime(nullable=False) - source.pending = False - + codename = current_app.crypto_util.genrandomid() + filesystem_id = current_app.crypto_util.hash_codename(codename) + journalist_designation = current_app.crypto_util.display_id() + source = Source(filesystem_id, journalist_designation) db.session.add(source) db.session.flush() + + # Generate submissions directory and generate source key + os.mkdir(current_app.storage.path(source.filesystem_id)) + current_app.crypto_util.genkeypair(source.filesystem_id, codename) + self.sources.append(source.id) def new_submission(self, source_id): source = Source.query.get(source_id) - # A source may have a null fid according to the DB, but this will - # break storage.path. 
- if source.filesystem_id is None: - return - - filename = self.fake_file(source.filesystem_id) - submission = Submission(source, filename) - - # For issue #1189 - if random_bool(): - submission.source_id = None + source.interaction_count += 1 + fpath = current_app.storage.save_message_submission( + source.filesystem_id, + source.interaction_count, + source.journalist_filename, + next(submissions), + ) + submission = Submission(source, fpath) + db.session.add(submission) - submission.downloaded = bool_or_none() + source.pending = False + source.last_updated = datetime.utcnow() - db.session.add(submission) db.session.flush() - self.submissions.append(submission.id) - - def fake_file(self, source_fid): - source_dir = path.join(self.config.STORE_DIR, source_fid) - if not path.exists(source_dir): - os.mkdir(source_dir) - - filename = random_chars(20, - nullable=False, - chars=string.ascii_lowercase) - num = random.randint(0, 100) - msg_type = 'msg' if random_bool() else 'doc.gz' - filename = '{}-{}-{}.gpg'.format(num, filename, msg_type) - f_len = int(math.floor(random.expovariate(100000) * 1024 * 1024 * 500)) - sub_path = current_app.storage.path(source_fid, filename) - with open(sub_path, 'w') as f: - f.write('x' * f_len) - - return filename def new_source_star(self, source_id): source = Source.query.get(source_id) @@ -161,15 +180,24 @@ def new_source_star(self, source_id): def new_reply(self, journalist_id, source_id): source = Source.query.get(source_id) - # A source may have a null fid according to the DB, but this will - # break storage.path. - if source.filesystem_id is None: - return - journalist = Journalist.query.get(journalist_id) - filename = self.fake_file(source.filesystem_id) - reply = Reply(journalist, source, filename) + + source.interaction_count += 1 + source.last_updated = datetime.utcnow() + + fname = "{}-{}-reply.gpg".format(source.interaction_count, source.journalist_filename) + current_app.crypto_util.encrypt( + next(replies), + [ + current_app.crypto_util.get_fingerprint(source.filesystem_id), + sdconfig.JOURNALIST_KEY + ], + current_app.storage.path(source.filesystem_id, fname), + ) + + reply = Reply(journalist, source, fname) db.session.add(reply) + db.session.flush() def new_journalist_login_attempt(self, journalist_id): journalist = Journalist.query.get(journalist_id) @@ -177,77 +205,116 @@ def new_journalist_login_attempt(self, journalist_id): attempt.timestamp = random_datetime(nullable=True) db.session.add(attempt) - def new_abandoned_submission(self, source_id): - '''For issue #1189''' - - source = Source.query.filter(Source.filesystem_id.isnot(None)).all()[0] - filename = self.fake_file(source.filesystem_id) - - # Use this as hack to create a real submission then swap out the - # source_id - submission = Submission(source, filename) - submission.source_id = source_id - db.session.add(submission) - db.session.commit() - self.delete_source(source_id) - - def delete_source(self, source_id): - '''For issue #1189''' - db.session.execute(text('DELETE FROM sources WHERE id = :source_id'), - {'source_id': source_id}) - def load(self): with self.app.app_context(): - for _ in range(self.JOURNALIST_COUNT * self.multiplier): + print("Creating {:d} journalists...".format(self.journalist_count)) + for i in range(1, self.journalist_count + 1): self.new_journalist() + if i % min(10, max(1, int(self.journalist_count / 10))) == 0: + sys.stdout.write("{}\r{}".format(" " * len(str(self.journalist_count + 1)), i)) + print("\n") db.session.commit() - for _ in 
range(self.SOURCE_COUNT * self.multiplier): + print("Creating {:d} sources...".format(self.source_count)) + for i in range(1, self.source_count + 1): self.new_source() + if i % min(10, max(1, int(self.source_count / 10))) == 0: + sys.stdout.write("{}\r{}".format(" " * len(str(self.source_count + 1)), i)) + print("\n") db.session.commit() - for sid in self.sources[0::5]: - for _ in range(1, self.multiplier + 1): + print( + "Creating submissions ({:d} each) for each source...".format( + self.submissions_per_source + ) + ) + for sid in self.sources: + for _ in range(1, self.submissions_per_source + 1): self.new_submission(sid) db.session.commit() - for sid in self.sources[0::5]: + print("Starring {:.2f}% of all sources...".format(self.source_star_fraction * 100)) + for sid in random.sample( + self.sources, int(self.source_count * self.source_star_fraction) + ): self.new_source_star(sid) db.session.commit() - for jid in self.journalists[0::10]: - for sid in self.sources[0::10]: - for _ in range(1, 3): - self.new_reply(jid, sid) + print( + "Creating replies ({:d} each) for {:.2f}% of sources...".format( + self.replies_per_source, self.source_reply_fraction * 100 + ) + ) + for sid in random.sample( + self.sources, int(self.source_count * self.source_reply_fraction) + ): + jid = random.choice(self.journalists) + for _ in range(self.replies_per_source): + self.new_reply(jid, sid) db.session.commit() - for jid in self.journalists[0::10]: + for jid in self.journalists: self.new_journalist_login_attempt(jid) db.session.commit() - for sid in random.sample(self.sources, self.multiplier): - self.new_abandoned_submission(sid) - def arg_parser(): parser = ArgumentParser( - path.basename(__file__), - description='Loads data into the database for testing upgrades') - parser.add_argument('-m', '--multiplier', type=positive_int, default=25, - help=('Factor to multiply the loaded data by ' - '(default 25)')) + path.basename(__file__), description="Loads data into the database for testing upgrades" + ) + parser.add_argument( + "--journalist-count", + type=positive_int, + default=10, + help=("Number of journalists to create"), + ) + parser.add_argument( + "--source-count", type=positive_int, default=50, help=("Number of sources to create") + ) + parser.add_argument( + "--submissions-per-source", + type=positive_int, + default=1, + help=("Number of submissions to create for each source"), + ) + parser.add_argument( + "--replies-per-source", + type=positive_int, + default=1, + help=("Number of replies to create for each source"), + ) + parser.add_argument( + "--source-star-fraction", + type=fraction, + default=0.1, + help=("Fraction of sources to star"), + ) + parser.add_argument( + "--source-reply-fraction", + type=fraction, + default=0.5, + help=("Fraction of sources to reply to"), + ) return parser def main(): args = arg_parser().parse_args() - print('Loading data. This make take a while.') - QaLoader(sdconfig, args.multiplier).load() - - -if __name__ == '__main__': + print("Loading data. 
This may take a while.") + QaLoader( + sdconfig, + args.journalist_count, + args.source_count, + args.submissions_per_source, + args.replies_per_source, + args.source_star_fraction, + args.source_reply_fraction, + ).load() + + +if __name__ == "__main__": try: main() except KeyboardInterrupt: - print('') # for prompt on a newline + print("") # for prompt on a newline sys.exit(1) diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py --- a/securedrop/source_app/api.py +++ b/securedrop/source_app/api.py @@ -3,6 +3,8 @@ from flask import Blueprint, current_app, make_response +from source_app.utils import get_sourcev2_url, get_sourcev3_url + import version @@ -16,7 +18,9 @@ def metadata(): 'gpg_fpr': config.JOURNALIST_KEY, 'sd_version': version.__version__, 'server_os': platform.linux_distribution()[1], - 'supported_languages': config.SUPPORTED_LOCALES + 'supported_languages': config.SUPPORTED_LOCALES, + 'v2_source_url': get_sourcev2_url(), + 'v3_source_url': get_sourcev3_url() } resp = make_response(json.dumps(meta)) resp.headers['Content-Type'] = 'application/json' diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py --- a/securedrop/source_app/forms.py +++ b/securedrop/source_app/forms.py @@ -1,9 +1,10 @@ +from flask import current_app from flask_babel import lazy_gettext as gettext from flask_wtf import FlaskForm -from wtforms import PasswordField -from wtforms.validators import InputRequired, Regexp, Length +from wtforms import FileField, PasswordField, TextAreaField +from wtforms.validators import InputRequired, Regexp, Length, ValidationError -from models import Source +from models import Source, Submission class LoginForm(FlaskForm): @@ -17,3 +18,20 @@ class LoginForm(FlaskForm): # Make sure to allow dashes since some words in the wordlist have them Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.')) ]) + + +class SubmissionForm(FlaskForm): + msg = TextAreaField("msg", render_kw={"placeholder": gettext("Write a message.")}) + fh = FileField("fh") + + def validate_msg(self, field): + if len(field.data) > Submission.MAX_MESSAGE_LEN: + message = gettext("Message text too long.") + if current_app.instance_config.allow_document_uploads: + message = "{} {}".format( + message, + gettext( + "Large blocks of text must be uploaded as a file, not copied and pasted." + ) + ) + raise ValidationError(message) diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py --- a/securedrop/source_app/main.py +++ b/securedrop/source_app/main.py @@ -2,6 +2,7 @@ import os import io +from base64 import urlsafe_b64encode from datetime import datetime from flask import (Blueprint, render_template, flash, redirect, url_for, g, session, current_app, request, Markup, abort) @@ -16,7 +17,7 @@ from source_app.utils import (logged_in, generate_unique_codename, async_genkey, normalize_timestamps, valid_codename, get_entropy_estimate) -from source_app.forms import LoginForm +from source_app.forms import LoginForm, SubmissionForm def make_blueprint(config): @@ -37,9 +38,17 @@ def generate(): return redirect(url_for('.lookup')) codename = generate_unique_codename(config) - session['codename'] = codename + + # Generate a unique id for each browser tab and associate the codename with this id. + # This will allow retrieval of the codename displayed in the tab from which the source has + # clicked to proceed to /generate (ref. 
issue #4458) + tab_id = urlsafe_b64encode(os.urandom(64)).decode() + codenames = session.get('codenames', {}) + codenames[tab_id] = codename + session['codenames'] = codenames + session['new_user'] = True - return render_template('generate.html', codename=codename) + return render_template('generate.html', codename=codename, tab_id=tab_id) @view.route('/org-logo') def select_logo(): @@ -51,33 +60,43 @@ def select_logo(): @view.route('/create', methods=['POST']) def create(): - filesystem_id = current_app.crypto_util.hash_codename( - session['codename']) - - source = Source(filesystem_id, current_app.crypto_util.display_id()) - db.session.add(source) - try: - db.session.commit() - except IntegrityError as e: - db.session.rollback() - current_app.logger.error( - "Attempt to create a source with duplicate codename: %s" % - (e,)) - - # Issue 2386: don't log in on duplicates - del session['codename'] - - # Issue 4361: Delete 'logged_in' if it's in the session - try: - del session['logged_in'] - except KeyError: - pass - - abort(500) + if session.get('logged_in', False): + flash(gettext("You are already logged in. Please verify your codename above as it " + + "may differ from the one displayed on the previous page."), + 'notification') else: - os.mkdir(current_app.storage.path(filesystem_id)) + tab_id = request.form['tab_id'] + codename = session['codenames'][tab_id] + session['codename'] = codename - session['logged_in'] = True + del session['codenames'] + + filesystem_id = current_app.crypto_util.hash_codename(codename) + + source = Source(filesystem_id, current_app.crypto_util.display_id()) + db.session.add(source) + try: + db.session.commit() + except IntegrityError as e: + db.session.rollback() + current_app.logger.error( + "Attempt to create a source with duplicate codename: %s" % + (e,)) + + # Issue 2386: don't log in on duplicates + del session['codename'] + + # Issue 4361: Delete 'logged_in' if it's in the session + try: + del session['logged_in'] + except KeyError: + pass + + abort(500) + else: + os.mkdir(current_app.storage.path(filesystem_id)) + + session['logged_in'] = True return redirect(url_for('.lookup')) @view.route('/lookup', methods=('GET',)) @@ -111,7 +130,7 @@ def lookup(): # Generate a keypair to encrypt replies from the journalist # Only do this if the journalist has flagged the source as one # that they would like to reply to. (Issue #140.) 
- if not current_app.crypto_util.getkey(g.filesystem_id) and \ + if not current_app.crypto_util.get_fingerprint(g.filesystem_id) and \ g.source.flagged: db_uri = current_app.config['SQLALCHEMY_DATABASE_URI'] async_genkey(current_app.crypto_util, @@ -126,13 +145,21 @@ def lookup(): replies=replies, flagged=g.source.flagged, new_user=session.get('new_user', None), - haskey=current_app.crypto_util.getkey( - g.filesystem_id)) + haskey=current_app.crypto_util.get_fingerprint(g.filesystem_id), + form=SubmissionForm(), + ) @view.route('/submit', methods=('POST',)) @login_required def submit(): allow_document_uploads = current_app.instance_config.allow_document_uploads + form = SubmissionForm() + if not form.validate(): + for field, errors in form.errors.items(): + for error in errors: + flash(error, "error") + return redirect(url_for('main.lookup')) + msg = request.form['msg'] fh = None if allow_document_uploads and 'fh' in request.files: @@ -171,21 +198,23 @@ def submit(): fh.stream)) if first_submission: - msg = render_template('first_submission_flashed_message.html') - flash(Markup(msg), "success") + flash_message = render_template('first_submission_flashed_message.html') + flash(Markup(flash_message), "success") else: if msg and not fh: html_contents = gettext('Thanks! We received your message.') - elif not msg and fh: + elif fh and not msg: html_contents = gettext('Thanks! We received your document.') else: html_contents = gettext('Thanks! We received your message and ' 'document.') - msg = render_template('next_submission_flashed_message.html', - html_contents=html_contents) - flash(Markup(msg), "success") + flash_message = render_template( + 'next_submission_flashed_message.html', + html_contents=html_contents + ) + flash(Markup(flash_message), "success") new_submissions = [] for fname in fnames: @@ -277,8 +306,12 @@ def login(): @view.route('/logout') def logout(): + """ + If a user is logged in, show them a logout page that prompts them to + click the New Identity button in Tor Browser to complete their session. + Otherwise redirect to the main Source Interface page. + """ if logged_in(): - msg = render_template('logout_flashed_message.html') # Clear the session after we render the message so it's localized # If a user specified a locale, save it and restore it @@ -286,7 +319,8 @@ def logout(): session.clear() session['locale'] = user_locale - flash(Markup(msg), "important hide-if-not-tor-browser") - return redirect(url_for('.index')) + return render_template('logout.html') + else: + return redirect(url_for('.index')) return view diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py --- a/securedrop/source_app/utils.py +++ b/securedrop/source_app/utils.py @@ -9,6 +9,7 @@ from threading import Thread import i18n +import re from crypto_util import CryptoException from models import Source @@ -112,3 +113,31 @@ def normalize_timestamps(filesystem_id): "Couldn't normalize submission " "timestamps (touch exited with %d)" % rc) + + +def check_url_file(path, regexp): + """ + Check that a file exists at the path given and contains a single line + matching the regexp. Used for checking the source interface address + files at /var/lib/securedrop/source_{v2,v3}_url. 
+ """ + try: + f = open(path, "r") + contents = f.readline().strip() + f.close() + if re.match(regexp, contents): + return contents + else: + return None + except IOError: + return None + + +def get_sourcev2_url(): + return check_url_file("/var/lib/securedrop/source_v2_url", + r"^[a-z0-9]{16}\.onion$") + + +def get_sourcev3_url(): + return check_url_file("/var/lib/securedrop/source_v3_url", + r"^[a-z0-9]{56}\.onion$") diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '1.2.2' +__version__ = '1.3.0' diff --git a/securedrop/worker.py b/securedrop/worker.py --- a/securedrop/worker.py +++ b/securedrop/worker.py @@ -88,7 +88,7 @@ def requeue_interrupted_jobs(queue_name=None): logging.debug("candidate job ids: {}".format(job_ids)) if not job_ids: - logging.info("No interrupted jobs found in started job registry.") + logging.debug("No interrupted jobs found in started job registry.") for job_id in job_ids: logging.debug("Considering job %s", job_id) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setuptools.setup( name="securedrop-app-code", - version="1.2.2", + version="1.3.0", author="Freedom of the Press Foundation", author_email="[email protected]", description="SecureDrop Server",
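
For reference on the check_url_file helper added to source_app/utils.py above: it accepts only a single line matching a strict onion-address pattern, so the metadata endpoint never exposes a malformed or tampered URL file. A minimal sketch of how those patterns behave, using illustrative addresses only (no real onion services):

    import re

    # Patterns as used by get_sourcev2_url/get_sourcev3_url above: a v2 onion
    # hostname is 16 base32 characters, a v3 hostname is 56, plus ".onion".
    V2_PATTERN = r"^[a-z0-9]{16}\.onion$"
    V3_PATTERN = r"^[a-z0-9]{56}\.onion$"

    # Illustrative values, not real services.
    assert re.match(V2_PATTERN, "a" * 16 + ".onion")
    assert re.match(V3_PATTERN, "b" * 56 + ".onion")
    # Wrong length or stray characters are rejected, so check_url_file()
    # would return None for such file contents.
    assert re.match(V3_PATTERN, "not-a-real-onion.onion") is None
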
diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst --- a/docs/development/testing_configuration_tests.rst +++ b/docs/development/testing_configuration_tests.rst @@ -20,45 +20,19 @@ Installation Running the Config Tests ------------------------ -In order to run the tests, first create and provision the VM you intend -to test. - -For the staging VMs: +Testinfra tests are executed against a virtualized staging environment. To +provision the environment and run the tests, run the following commands: .. code:: sh make build-debs make staging - -The VMs will be set up using either the libvirt or virtualbox Vagrant VM provider, -depending on your system settings. You'll need to use the appropriate commands below -based on your choice of provider. - -Then, to run the tests: - -libvirt: -~~~~~~~~ - -.. code:: sh - - molecule verify -s libvirt-staging-xenial - -virtualbox: -~~~~~~~~~~~ - -.. code:: sh - - molecule verify -s virtualbox-staging-xenial - -.. tip:: To run only a single test, set ``PYTEST_ADDOPTS="-k name_of_test"`` - in your environment. + make testinfra Test failure against any host will generate a report with informative output about the specific test that triggered the error. Molecule will also exit with a non-zero status code. -.. note:: To build and test the VMs with one command, use the Molecule ``test`` - action: ``molecule test -s libvirt-staging-xenial --destroy=never``, or ``molecule test -s virtualbox-staging-xenial --destroy=never``. Updating the Config Tests ------------------------- diff --git a/docs/development/upgrade_testing.rst b/docs/development/upgrade_testing.rst --- a/docs/development/upgrade_testing.rst +++ b/docs/development/upgrade_testing.rst @@ -116,6 +116,8 @@ Log back into the *Application Server*, and repeat the previous commands: Navigate to the Source Interface URL again, and confirm you see the upgraded version in the footer. Then proceed with testing the new version. +.. _updating_upgrade_boxes: + Updating the base boxes used for upgrade testing ------------------------------------------------ @@ -124,10 +126,10 @@ new VM images, to enable testing against that base version in future upgrade testing. The procedure is as follows: 1. ``make clean`` to remove any previous artifacts (which would also be pushed) -2. ``git checkout <version>`` (if a point release, ``git checkout develop``) -3. ``make vagrant-package`` -4. ``cd molecule/vagrant-packager && ./push.yml`` to upload to S3 -5. Commit the local changes to JSON files and open a PR. +#. ``git checkout <version>`` +#. ``make vagrant-package`` +#. ``cd molecule/vagrant-packager && ./push.yml`` to upload to S3 +#. Commit the local changes to JSON files and open a PR. Subsequent invocations of ``make upgrade-start`` will pull the latest version of the box. 
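
Context for the ``make testinfra`` step documented above: Testinfra checks are ordinary pytest functions that receive a ``host`` fixture wired to the target VM or container. A minimal sketch of such a check, assuming the standard testinfra fixtures; the target and package names are illustrative, not taken from the SecureDrop suite:

    # Discovered by pytest like any other test module; the "host" fixture
    # is provided by testinfra.
    testinfra_hosts = ["docker://example-container"]  # illustrative target


    def test_example_service(host):
        # host.package() and host.service() are standard testinfra modules.
        assert host.package("openssh-server").is_installed
        assert host.service("ssh").is_running
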
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py --- a/journalist_gui/test_gui.py +++ b/journalist_gui/test_gui.py @@ -13,7 +13,7 @@ @mock.patch('journalist_gui.SecureDropUpdater.sys.exit') [email protected]('journalist_gui.SecureDropUpdater.QtWidgets.QMessageBox') [email protected]('syslog.syslog') class TestSecondInstancePrevention(unittest.TestCase): def setUp(self): self.mock_app = mock.MagicMock() @@ -204,6 +204,18 @@ def test_tailsconfigThread_generic_failure(self, pt): @mock.patch('pexpect.spawn') def test_tailsconfigThread_sudo_password_is_wrong(self, pt): + child = pt() + before = MagicMock() + before.decode.return_value = "stuff[sudo via ansible, key=blahblahblah" + child.before = before + self.window.tails_thread.run() + self.assertNotIn("failed=0", self.window.output) + self.assertEqual(self.window.update_success, False) + self.assertEqual(self.window.failure_reason, + strings.tailsconfig_failed_sudo_password) + + @mock.patch('pexpect.spawn') + def test_tailsconfigThread_timeout(self, pt): child = pt() before = MagicMock() before.decode.side_effect = ["some data", @@ -213,7 +225,7 @@ def test_tailsconfigThread_sudo_password_is_wrong(self, pt): self.assertNotIn("failed=0", self.window.output) self.assertEqual(self.window.update_success, False) self.assertEqual(self.window.failure_reason, - strings.tailsconfig_failed_sudo_password) + strings.tailsconfig_failed_timeout) @mock.patch('pexpect.spawn') def test_tailsconfigThread_some_other_subprocess_error(self, pt): diff --git a/molecule/builder-xenial/tests/test_securedrop_deb_package.py b/molecule/builder-xenial/tests/test_securedrop_deb_package.py --- a/molecule/builder-xenial/tests/test_securedrop_deb_package.py +++ b/molecule/builder-xenial/tests/test_securedrop_deb_package.py @@ -401,9 +401,7 @@ def test_ossec_binaries_are_present_agent(host, deb): "/var/ossec/bin/ossec-syscheckd", "/var/ossec/bin/ossec-agentd", "/var/ossec/bin/manage_agents", - "/var/ossec/bin/ossec-lua", "/var/ossec/bin/ossec-control", - "/var/ossec/bin/ossec-luac", "/var/ossec/bin/ossec-logcollector", "/var/ossec/bin/util.sh", "/var/ossec/bin/ossec-execd", @@ -433,13 +431,11 @@ def test_ossec_binaries_are_present_server(host, deb): "/var/ossec/bin/ossec-reportd", "/var/ossec/bin/ossec-agentlessd", "/var/ossec/bin/manage_agents", - "/var/ossec/bin/ossec-lua", "/var/ossec/bin/rootcheck_control", "/var/ossec/bin/ossec-control", "/var/ossec/bin/ossec-dbd", "/var/ossec/bin/ossec-csyslogd", "/var/ossec/bin/ossec-regex", - "/var/ossec/bin/ossec-luac", "/var/ossec/bin/agent_control", "/var/ossec/bin/ossec-monitord", "/var/ossec/bin/clear_stats", diff --git a/molecule/builder-xenial/tests/test_security_updates.py b/molecule/builder-xenial/tests/test_security_updates.py --- a/molecule/builder-xenial/tests/test_security_updates.py +++ b/molecule/builder-xenial/tests/test_security_updates.py @@ -1,16 +1,34 @@ import os +from subprocess import check_output +import re +import pytest + SECUREDROP_TARGET_PLATFORM = os.environ.get("SECUREDROP_TARGET_PLATFORM") testinfra_hosts = [ "docker://{}-sd-sec-update".format(SECUREDROP_TARGET_PLATFORM) ] +def test_should_run(): + command = ["git", "describe", "--all"] + version = check_output(command).decode("utf8")[0:-1] + candidates = (r"(^tags/[\d]+\.[\d]+\.[\d]+-rc[\d]+)|" + r"(^tags/[\d]+\.[\d]+\.[\d]+)|" + r"(^heads/release/[\d]+\.[\d]+\.[\d]+)|" + r"(^heads/update-builder.*)") + result = re.match(candidates, version) + if result: + return True + else: + return False + + [email protected](not 
test_should_run(), reason="Only tested for RCs and builder updates") def test_ensure_no_updates_avail(host): """ Test to make sure that there are no security-updates in the base builder container. """ - # Filter out all the security repos to their own file # without this change all the package updates appeared as if they were # coming from normal ubuntu update channel (since they get posted to both) diff --git a/molecule/builder-xenial/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml --- a/molecule/builder-xenial/tests/vars.yml +++ b/molecule/builder-xenial/tests/vars.yml @@ -1,9 +1,9 @@ --- -securedrop_version: "1.2.2" -ossec_version: "3.0.0" +securedrop_version: "1.3.0" +ossec_version: "3.6.0" keyring_version: "0.1.3" config_version: "0.1.3" -grsec_version: "4.14.154" +grsec_version: "4.14.175" # These values will be interpolated with values populated above # via helper functions in the tests. diff --git a/molecule/fetch-tor-packages/tests/test_tor_packages.py b/molecule/fetch-tor-packages/tests/test_tor_packages.py --- a/molecule/fetch-tor-packages/tests/test_tor_packages.py +++ b/molecule/fetch-tor-packages/tests/test_tor_packages.py @@ -8,7 +8,7 @@ {"name": "tor", "arch": "amd64"}, {"name": "tor-geoipdb", "arch": "all"}, ] -TOR_VERSION = "0.4.1.6-1~xenial+1" +TOR_VERSION = "0.4.2.7-1~xenial+1" def test_tor_apt_repo(host): diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py @@ -92,8 +92,10 @@ def test_apache_headers_journalist_interface(host, header): securedrop_test_vars.apache_listening_address), "WSGIDaemonProcess journalist processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format( # noqa securedrop_test_vars.securedrop_code), - 'WSGIProcessGroup journalist', - 'WSGIScriptAlias / /var/www/journalist.wsgi', + ( + 'WSGIScriptAlias / /var/www/journalist.wsgi ' + 'process-group=journalist application-group=journalist' + ), 'WSGIPassAuthorization On', 'Header set Cache-Control "no-store"', "Alias /static {}/static".format(securedrop_test_vars.securedrop_code), diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/staging/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/staging/common/test_grsecurity.py @@ -185,12 +185,12 @@ def test_wireless_disabled_in_kernel_config(host, kernel_opts): remove wireless support from the kernel. Let's make sure wireless is disabled in the running kernel config! 
""" + with host.sudo(): + kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION) + kernel_config = host.file(kernel_config_path).content_string - kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION) - kernel_config = host.file(kernel_config_path).content_string - - line = "# CONFIG_{} is not set".format(kernel_opts) - assert line in kernel_config + line = "# CONFIG_{} is not set".format(kernel_opts) + assert line in kernel_config @pytest.mark.parametrize('kernel_opts', [ @@ -203,11 +203,12 @@ def test_kernel_options_enabled_config(host, kernel_opts): Tests kernel config for options that should be enabled """ - kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION) - kernel_config = host.file(kernel_config_path).content_string + with host.sudo(): + kernel_config_path = "/boot/config-{}-grsec-securedrop".format(KERNEL_VERSION) + kernel_config = host.file(kernel_config_path).content_string - line = "{}=y".format(kernel_opts) - assert line in kernel_config + line = "{}=y".format(kernel_opts) + assert line in kernel_config def test_mds_mitigations_and_smt_disabled(host): @@ -216,7 +217,8 @@ def test_mds_mitigations_and_smt_disabled(host): see https://www.kernel.org/doc/html/latest/admin-guide/hw-vuln/mds.html """ - grub_config_path = "/boot/grub/grub.cfg" - grub_config = host.file(grub_config_path) + with host.sudo(): + grub_config_path = "/boot/grub/grub.cfg" + grub_config = host.file(grub_config_path) - assert grub_config.contains("mds=full,nosmt") + assert grub_config.contains("mds=full,nosmt") diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -218,6 +218,23 @@ def test_files(journalist_app, test_journo): 'replies': source.replies} [email protected](scope='function') +def test_files_deleted_journalist(journalist_app, test_journo): + with journalist_app.app_context(): + source, codename = utils.db_helper.init_source() + utils.db_helper.submit(source, 2) + test_journo['journalist'] + juser, _ = utils.db_helper.init_journalist("f", "l", is_admin=False) + utils.db_helper.reply(juser, source, 1) + utils.db_helper.delete_journalist(juser) + return {'source': source, + 'codename': codename, + 'filesystem_id': source.filesystem_id, + 'uuid': source.uuid, + 'submissions': source.submissions, + 'replies': source.replies} + + @pytest.fixture(scope='function') def journalist_api_token(journalist_app, test_journo): with journalist_app.test_client() as app: diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py --- a/securedrop/tests/functional/functional_test.py +++ b/securedrop/tests/functional/functional_test.py @@ -307,7 +307,7 @@ def wait_for_source_key(self, source_name): filesystem_id = self.source_app.crypto_util.hash_codename(source_name) def key_available(filesystem_id): - assert self.source_app.crypto_util.getkey(filesystem_id) + assert self.source_app.crypto_util.get_fingerprint(filesystem_id) self.wait_for(lambda: key_available(filesystem_id), timeout=60) diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -282,6 +282,26 @@ def updated_image(): # giving extra time for upload to complete self.wait_for(updated_image, timeout=self.timeout * 6) + def 
_admin_disallows_document_uploads(self): + if not self.driver.find_element_by_id("prevent_document_uploads").is_selected(): + self.safe_click_by_id("prevent_document_uploads") + self.safe_click_by_id("submit-submission-preferences") + + def preferences_saved(): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Preferences saved." in flash_msg.text + self.wait_for(preferences_saved, timeout=self.timeout * 6) + + def _admin_allows_document_uploads(self): + if self.driver.find_element_by_id("prevent_document_uploads").is_selected(): + self.safe_click_by_id("prevent_document_uploads") + self.safe_click_by_id("submit-submission-preferences") + + def preferences_saved(): + flash_msg = self.driver.find_element_by_css_selector(".flash") + assert "Preferences saved." in flash_msg.text + self.wait_for(preferences_saved, timeout=self.timeout * 6) + def _add_user(self, username, first_name="", last_name="", is_admin=False, hotp=None): self.safe_send_keys_by_css_selector('input[name="username"]', username) @@ -342,7 +362,7 @@ def user_token_added(): # Successfully verifying the code should redirect to the admin # interface, and flash a message indicating success flash_msg = self.driver.find_elements_by_css_selector(".flash") - assert "The two-factor code for user \"{user}\" was verified successfully.".format( + assert "The two-factor code for user \"{}\" was verified successfully.".format( self.new_user["username"] ) in [el.text for el in flash_msg] diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py --- a/securedrop/tests/functional/source_navigation_steps.py +++ b/securedrop/tests/functional/source_navigation_steps.py @@ -2,6 +2,9 @@ import time import json +import pytest +from selenium.common.exceptions import NoSuchElementException + class SourceNavigationStepsMixin: def _is_on_source_homepage(self): @@ -16,6 +19,9 @@ def _is_on_lookup_page(self): def _is_on_generate_page(self): return self.wait_for(lambda: self.driver.find_element_by_id("create-form")) + def _is_on_logout_page(self): + return self.wait_for(lambda: self.driver.find_element_by_id("click-new-identity-tor")) + def _source_visits_source_homepage(self): self.driver.get(self.source_location) assert self._is_on_source_homepage() @@ -37,6 +43,9 @@ def _source_clicks_submit_documents_on_homepage(self): # a diceware codename they can use for subsequent logins assert self._is_on_generate_page() + def _source_regenerates_codename(self): + self.safe_click_by_id("regenerate-submit") + def _source_chooses_to_submit_documents(self): self._source_clicks_submit_documents_on_homepage() @@ -45,7 +54,7 @@ def _source_chooses_to_submit_documents(self): assert len(codename.text) > 0 self.source_name = codename.text - def _source_shows_codename(self): + def _source_shows_codename(self, verify_source_name=True): content = self.driver.find_element_by_id("codename-hint-content") assert not content.is_displayed() @@ -54,7 +63,8 @@ def _source_shows_codename(self): self.wait_for(lambda: content.is_displayed()) assert content.is_displayed() content_content = self.driver.find_element_by_css_selector("#codename-hint-content p") - assert content_content.text == self.source_name + if verify_source_name: + assert content_content.text == self.source_name def _source_hides_codename(self): content = self.driver.find_element_by_id("codename-hint-content") @@ -188,7 +198,7 @@ def reply_deleted(): def _source_logs_out(self): self.safe_click_by_id("logout") - 
self.wait_for(lambda: ("Submit for the first time" in self.driver.page_source)) + assert self._is_on_logout_page() def _source_not_found(self): self.driver.get(self.source_location + "/unlikely") @@ -211,5 +221,26 @@ def _source_sees_session_timeout_message(self): notification = self.driver.find_element_by_css_selector(".important") if not hasattr(self, "accept_languages"): - expected_text = "Your session timed out due to inactivity." + expected_text = "You were logged out due to inactivity." + assert expected_text in notification.text + + def _source_sees_document_attachment_item(self): + assert self.driver.find_element_by_class_name("attachment") is not None + + def _source_does_not_sees_document_attachment_item(self): + with pytest.raises(NoSuchElementException): + self.driver.find_element_by_class_name("attachment") + + def _source_sees_already_logged_in_in_other_tab_message(self): + notification = self.driver.find_element_by_css_selector(".notification") + + if not hasattr(self, "accepted_languages"): + expected_text = "You are already logged in." + assert expected_text in notification.text + + def _source_sees_redirect_already_logged_in_message(self): + notification = self.driver.find_element_by_css_selector(".notification") + + if not hasattr(self, "accepted_languages"): + expected_text = "You were redirected because you are already logged in." assert expected_text in notification.text diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py --- a/securedrop/tests/functional/test_admin_interface.py +++ b/securedrop/tests/functional/test_admin_interface.py @@ -1,10 +1,12 @@ from . import functional_test as ft from . import journalist_navigation_steps +from . import source_navigation_steps class TestAdminInterface( ft.FunctionalTest, - journalist_navigation_steps.JournalistNavigationStepsMixin): + journalist_navigation_steps.JournalistNavigationStepsMixin, + source_navigation_steps.SourceNavigationStepsMixin): def test_admin_interface(self): self._admin_logs_in() @@ -61,3 +63,26 @@ def test_admin_adds_admin_user(self): self._admin_adds_a_user(is_admin=True) self._new_admin_user_can_log_in() self._admin_can_edit_new_user() + + def test_disallow_file_submission(self): + self._admin_logs_in() + self._admin_visits_admin_interface() + self._admin_visits_system_config_page() + self._admin_disallows_document_uploads() + + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_does_not_sees_document_attachment_item() + + def test_allow_file_submission(self): + self._admin_logs_in() + self._admin_visits_admin_interface() + self._admin_visits_system_config_page() + self._admin_disallows_document_uploads() + self._admin_allows_document_uploads() + + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + self._source_continues_to_submit_page() + self._source_sees_document_attachment_item() diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py --- a/securedrop/tests/functional/test_source.py +++ b/securedrop/tests/functional/test_source.py @@ -29,3 +29,106 @@ def test_journalist_key_from_source_interface(self): data = data.decode('utf-8') assert "BEGIN PGP PUBLIC KEY BLOCK" in data + + +class TestDuplicateSourceInterface( + functional_test.FunctionalTest, + source_navigation_steps.SourceNavigationStepsMixin): + + def get_codename_generate(self): + return 
self.driver.find_element_by_css_selector("#codename").text + + def get_codename_lookup(self): + return self.driver.find_element_by_css_selector("#codename-hint-content p").text + + def test_duplicate_generate_pages(self): + # Test generation of multiple codenames in different browser tabs, ref. issue 4458. + + # Generate a codename in Tab A + assert len(self.driver.window_handles) == 1 + tab_a = self.driver.current_window_handle + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_a = self.get_codename_generate() + + # Generate a different codename in Tab B + self.driver.execute_script("window.open()") + tab_b = self.driver.window_handles[1] + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_b = self.get_codename_generate() + + assert tab_a != tab_b + assert codename_a != codename_b + + # Proceed to submit documents in Tab A + self.driver.switch_to.window(tab_a) + assert self.driver.current_window_handle == tab_a + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_shows_codename(verify_source_name=False) + codename_lookup_a = self.get_codename_lookup() + assert codename_lookup_a == codename_a + self._source_submits_a_message() + + # Proceed to submit documents in Tab B + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_sees_already_logged_in_in_other_tab_message() + self._source_shows_codename(verify_source_name=False) + codename_lookup_b = self.get_codename_lookup() + # We expect the codename to be the one from Tab A + assert codename_lookup_b == codename_a + self._source_submits_a_message() + + def test_duplicate_generate_pages_with_refresh(self): + # Test generation of multiple codenames in different browser tabs, including behavior + # of refreshing the codemae in each tab. Ref. issue 4458. 
+ + # Generate a codename in Tab A + assert len(self.driver.window_handles) == 1 + tab_a = self.driver.current_window_handle + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_a1 = self.get_codename_generate() + # Regenerate codename in Tab A + self._source_regenerates_codename() + codename_a2 = self.get_codename_generate() + assert codename_a1 != codename_a2 + + # Generate a different codename in Tab B + self.driver.execute_script("window.open()") + tab_b = self.driver.window_handles[1] + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_visits_source_homepage() + self._source_chooses_to_submit_documents() + codename_b = self.get_codename_generate() + assert codename_b != codename_a1 != codename_a2 + + # Proceed to submit documents in Tab A + self.driver.switch_to.window(tab_a) + assert self.driver.current_window_handle == tab_a + self._source_continues_to_submit_page() + assert self._is_on_lookup_page() + self._source_shows_codename(verify_source_name=False) + codename_lookup_a = self.get_codename_lookup() + assert codename_lookup_a == codename_a2 + self._source_submits_a_message() + + # Regenerate codename in Tab B + self.driver.switch_to.window(tab_b) + assert self.driver.current_window_handle == tab_b + self._source_regenerates_codename() + # We expect the source to be directed to /lookup with a flash message + assert self._is_on_lookup_page() + self._source_sees_redirect_already_logged_in_message() + # Check codename + self._source_shows_codename(verify_source_name=False) + codename_lookup_b = self.get_codename_lookup() + assert codename_lookup_b == codename_a2 + self._source_submits_a_message() diff --git a/securedrop/tests/pageslayout/test_source.py b/securedrop/tests/pageslayout/test_source.py --- a/securedrop/tests/pageslayout/test_source.py +++ b/securedrop/tests/pageslayout/test_source.py @@ -153,11 +153,11 @@ def test_index(self): self._source_visits_source_homepage() self._screenshot('source-index.png') - def test_logout_flashed_message(self): + def test_logout(self): self.disable_js_torbrowser_driver() self._source_visits_source_homepage() self._source_chooses_to_submit_documents() self._source_continues_to_submit_page() self._source_submits_a_file() self._source_logs_out() - self._screenshot('source-logout_flashed_message.png') + self._screenshot('source-logout_page.png') diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -11,7 +11,7 @@ import crypto_util import models -from crypto_util import CryptoUtil, CryptoException, FIFOCache +from crypto_util import CryptoUtil, CryptoException from db import db @@ -40,7 +40,7 @@ def test_encrypt_success(source_app, config, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -70,7 +70,7 @@ def test_encrypt_without_output(source_app, config, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY]) plaintext = 
source_app.crypto_util.decrypt( test_source['codename'], @@ -96,7 +96,7 @@ def test_encrypt_binary_stream(source_app, config, test_source): with io.open(os.path.realpath(__file__)) as fh: ciphertext = source_app.crypto_util.encrypt( fh, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -116,7 +116,7 @@ def test_encrypt_fingerprints_not_a_list_or_tuple(source_app, test_source): with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - source_app.crypto_util.getkey(test_source['filesystem_id']), + source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) plaintext = source_app.crypto_util.decrypt(test_source['codename'], @@ -133,7 +133,7 @@ def test_basic_encrypt_then_decrypt_multiple_recipients(source_app, with source_app.app_context(): ciphertext = source_app.crypto_util.encrypt( message, - [source_app.crypto_util.getkey(test_source['filesystem_id']), + [source_app.crypto_util.get_fingerprint(test_source['filesystem_id']), config.JOURNALIST_KEY], source_app.storage.path(test_source['filesystem_id'], 'somefile.gpg')) @@ -208,7 +208,7 @@ def test_genkeypair(source_app): db.session.commit() source_app.crypto_util.genkeypair(source.filesystem_id, codename) - assert source_app.crypto_util.getkey(filesystem_id) is not None + assert source_app.crypto_util.get_fingerprint(filesystem_id) is not None def parse_gpg_date_string(date_string): @@ -241,15 +241,15 @@ def test_reply_keypair_creation_and_expiration_dates(source_app): db.session.commit() source_app.crypto_util.genkeypair(source.filesystem_id, codename) - # crypto_util.getkey only returns the fingerprint of the key. We need + # crypto_util.get_fingerprint only returns the fingerprint of the key. We need # the full output of gpg.list_keys() to check the creation and # expire dates. # - # TODO: it might be generally useful to refactor crypto_util.getkey so + # TODO: it might be generally useful to refactor crypto_util.get_fingerprint so # it always returns the entire key dictionary instead of just the # fingerprint (which is always easily extracted from the entire key # dictionary). 
- new_key_fingerprint = source_app.crypto_util.getkey(filesystem_id) + new_key_fingerprint = source_app.crypto_util.get_fingerprint(filesystem_id) new_key = [key for key in source_app.crypto_util.gpg.list_keys() if new_key_fingerprint == key['fingerprint']][0] @@ -267,7 +267,7 @@ def test_reply_keypair_creation_and_expiration_dates(source_app): def test_delete_reply_keypair(source_app, test_source): fid = test_source['filesystem_id'] source_app.crypto_util.delete_reply_keypair(fid) - assert source_app.crypto_util.getkey(fid) is None + assert source_app.crypto_util.get_fingerprint(fid) is None def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source, @@ -285,7 +285,7 @@ def test_delete_reply_keypair_pinentry_status_is_handled(source_app, test_source captured = capsys.readouterr() assert "ValueError: Unknown status message: 'PINENTRY_LAUNCHED'" not in captured.err - assert source_app.crypto_util.getkey(fid) is None + assert source_app.crypto_util.get_fingerprint(fid) is None def test_delete_reply_keypair_no_key(source_app): @@ -295,25 +295,24 @@ def test_delete_reply_keypair_no_key(source_app): source_app.crypto_util.delete_reply_keypair('Reality Winner') -def test_getkey(source_app, test_source): - assert (source_app.crypto_util.getkey(test_source['filesystem_id']) +def test_get_fingerprint(source_app, test_source): + assert (source_app.crypto_util.get_fingerprint(test_source['filesystem_id']) is not None) # check that a non-existent key returns None - assert source_app.crypto_util.getkey('x' * 50) is None + assert source_app.crypto_util.get_fingerprint('x' * 50) is None -def test_export_pubkey(source_app, test_source): +def test_get_pubkey(source_app, test_source): begin_pgp = '-----BEGIN PGP PUBLIC KEY BLOCK----' # check that a filesystem_id exports the pubkey - exported = source_app.crypto_util.export_pubkey( - test_source['filesystem_id']) - assert exported.startswith(begin_pgp) + pubkey = source_app.crypto_util.get_pubkey(test_source['filesystem_id']) + assert pubkey.startswith(begin_pgp) # check that a non-existent identifer exports None - exported = source_app.crypto_util.export_pubkey('x' * 50) - assert exported is None + pubkey = source_app.crypto_util.get_pubkey('x' * 50) + assert pubkey is None @given( @@ -343,25 +342,3 @@ def test_encrypt_then_decrypt_gives_same_result( decrypted_text = crypto.decrypt(secret, ciphertext) assert decrypted_text == message - - -def test_fifo_cache(): - cache = FIFOCache(3) - - cache.put('item 1', 1) - cache.put('item 2', 2) - cache.put('item 3', 3) - - assert cache.get('item 1') == 1 - assert cache.get('item 2') == 2 - assert cache.get('item 3') == 3 - - cache.put('item 4', 4) - # Maxsize is 3, so adding item 4 should kick out item 1 - assert not cache.get('item 1') - assert cache.get('item 2') == 2 - assert cache.get('item 3') == 3 - assert cache.get('item 4') == 4 - - cache.delete('item 2') - assert not cache.get('item 2') diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -56,7 +56,8 @@ def test_submit_message(source_app, journalist_app, test_journo): with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) filesystem_id = g.filesystem_id # redirected to submission form resp = app.post('/submit', data=dict( @@ -153,7 +154,8 @@ def 
test_submit_file(source_app, journalist_app, test_journo): with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) filesystem_id = g.filesystem_id # redirected to submission form resp = app.post('/submit', data=dict( @@ -254,7 +256,8 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, with source_app.test_client() as app: app.get('/generate') - app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) codename = session['codename'] filesystem_id = g.filesystem_id # redirected to submission form @@ -301,7 +304,7 @@ def _helper_test_reply(journalist_app, source_app, config, test_journo, # Block up to 15s for the reply keypair, so we can test sending a reply def assertion(): - assert current_app.crypto_util.getkey(filesystem_id) is not None + assert current_app.crypto_util.get_fingerprint(filesystem_id) is not None utils.asynchronous.wait_for_assertion(assertion, 15) # Create 2 replies to test deleting on journalist and source interface @@ -474,7 +477,8 @@ def test_delete_collection(mocker, source_app, journalist_app, test_journo): # first, add a source with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) resp = app.post('/submit', data=dict( msg="This is a test.", fh=(BytesIO(b''), ''), @@ -523,7 +527,8 @@ def test_delete_collections(mocker, journalist_app, source_app, test_journo): num_sources = 2 for i in range(num_sources): app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) app.post('/submit', data=dict( msg="This is a test " + str(i) + ".", fh=(BytesIO(b''), ''), @@ -577,7 +582,8 @@ def test_filenames(source_app, journalist_app, test_journo): # add a source and submit stuff with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) _helper_filenames_submit(app) # navigate to the collection page @@ -603,7 +609,8 @@ def test_filenames_delete(journalist_app, source_app, test_journo): # add a source and submit stuff with source_app.test_client() as app: app.get('/generate') - app.post('/create') + tab_id = next(iter(session['codenames'].keys())) + app.post('/create', data={'tab_id': tab_id}) _helper_filenames_submit(app) # navigate to the collection page @@ -714,7 +721,8 @@ def test_prevent_document_uploads(source_app, journalist_app, test_admin): # Check that the source interface accepts only messages: with source_app.test_client() as app: app.get('/generate') - resp = app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post('/create', data={'tab_id': tab_id}, follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') @@ -739,7 +747,8 @@ def test_no_prevent_document_uploads(source_app, journalist_app, test_admin): # Check that the source interface accepts both files and messages: with source_app.test_client() as app: app.get('/generate') - resp = app.post('/create', follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post('/create', data={'tab_id': 
tab_id}, follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -1297,6 +1297,11 @@ def test_prevent_document_uploads(journalist_app, test_admin): data=form.data, follow_redirects=True) assert InstanceConfig.get_current().allow_document_uploads is False + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.update_submission_preferences'), + data=form.data, + follow_redirects=True) + ins.assert_message_flashed('Preferences saved.', 'submission-preferences-success') def test_no_prevent_document_uploads(journalist_app, test_admin): @@ -1306,6 +1311,10 @@ def test_no_prevent_document_uploads(journalist_app, test_admin): app.post(url_for('admin.update_submission_preferences'), follow_redirects=True) assert InstanceConfig.get_current().allow_document_uploads is True + with InstrumentedApp(journalist_app) as ins: + app.post(url_for('admin.update_submission_preferences'), + follow_redirects=True) + ins.assert_message_flashed('Preferences saved.', 'submission-preferences-success') def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin): @@ -1682,7 +1691,7 @@ def test_delete_source_deletes_source_key(journalist_app, utils.db_helper.reply(journo, source, 2) # Source key exists - source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['filesystem_id']) assert source_key is not None @@ -1690,7 +1699,7 @@ def test_delete_source_deletes_source_key(journalist_app, test_source['filesystem_id']) # Source key no longer exists - source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['filesystem_id']) assert source_key is None diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -477,6 +477,31 @@ def test_authorized_user_can_get_single_reply(journalist_app, test_files, test_files['source'].replies[0].size +def test_reply_of_deleted_journalist(journalist_app, + test_files_deleted_journalist, + journalist_api_token): + with journalist_app.test_client() as app: + reply_uuid = test_files_deleted_journalist['source'].replies[0].uuid + uuid = test_files_deleted_journalist['source'].uuid + response = app.get(url_for('api.single_reply', + source_uuid=uuid, + reply_uuid=reply_uuid), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + assert response.json['uuid'] == reply_uuid + assert response.json['journalist_username'] == "deleted" + assert response.json['journalist_uuid'] == "deleted" + assert response.json['journalist_first_name'] == "" + assert response.json['journalist_last_name'] == "" + assert response.json['is_deleted_by_source'] is False + assert response.json['filename'] == \ + test_files_deleted_journalist['source'].replies[0].filename + assert response.json['size'] == \ + test_files_deleted_journalist['source'].replies[0].size + + def test_authorized_user_can_delete_single_submission(journalist_app, test_submissions, journalist_api_token): @@ -627,7 +652,7 @@ def test_authorized_user_can_add_reply(journalist_app, journalist_api_token, # First we must encrypt the reply, or it will get rejected # by the server. 
- source_key = current_app.crypto_util.getkey( + source_key = current_app.crypto_util.get_fingerprint( test_source['source'].filesystem_id) reply_content = current_app.crypto_util.gpg.encrypt( 'This is a plaintext reply', source_key).data diff --git a/securedrop/tests/test_qa_loader.py b/securedrop/tests/test_qa_loader.py --- a/securedrop/tests/test_qa_loader.py +++ b/securedrop/tests/test_qa_loader.py @@ -5,4 +5,4 @@ def test_load_data(journalist_app, config): # Use the journalist_app fixture to init the DB - QaLoader(config, multiplier=1).load() + QaLoader(config).load() diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -83,7 +83,7 @@ def test_generate(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 - session_codename = session['codename'] + session_codename = next(iter(session['codenames'].values())) text = resp.data.decode('utf-8') assert "This codename is what you will use in future visits" in text @@ -113,11 +113,13 @@ def test_create_new_source(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 - resp = app.post(url_for('main.create'), follow_redirects=True) + tab_id = next(iter(session['codenames'].keys())) + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) assert session['logged_in'] is True # should be redirected to /lookup text = resp.data.decode('utf-8') assert "Submit Files" in text + assert 'codenames' not in session def test_generate_too_long_codename(source_app): @@ -143,17 +145,18 @@ def test_create_duplicate_codename_logged_in_not_in_session(source_app): with source_app.test_client() as app: resp = app.get(url_for('main.generate')) assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) # Create a source the first time - resp = app.post(url_for('main.create'), follow_redirects=True) + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) assert resp.status_code == 200 codename = session['codename'] with source_app.test_client() as app: # Attempt to add the same source with app.session_transaction() as sess: - sess['codename'] = codename - resp = app.post(url_for('main.create'), follow_redirects=True) + sess['codenames'] = {tab_id: codename} + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) logger.assert_called_once() assert ("Attempt to create a source with duplicate codename" in logger.call_args[0][0]) @@ -163,26 +166,31 @@ def test_create_duplicate_codename_logged_in_not_in_session(source_app): def test_create_duplicate_codename_logged_in_in_session(source_app): - with patch.object(source.app.logger, 'error') as logger: - with source_app.test_client() as app: - resp = app.get(url_for('main.generate')) - assert resp.status_code == 200 - - # Create a source the first time - resp = app.post(url_for('main.create'), follow_redirects=True) - assert resp.status_code == 200 + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) - # Attempt to add the same source - resp = app.post(url_for('main.create'), follow_redirects=True) - logger.assert_called_once() - assert ("Attempt to create a source with duplicate codename" - in logger.call_args[0][0]) - assert resp.status_code == 500 - 
assert 'codename' not in session + # Create a source the first time + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) + assert resp.status_code == 200 + codename = session['codename'] + logged_in = session['logged_in'] - # Reproducer for bug #4361 - resp = app.post(url_for('main.index'), follow_redirects=True) - assert 'logged_in' not in session + # Attempt to add another source in the same session + with source_app.test_client() as app: + resp = app.get(url_for('main.generate')) + assert resp.status_code == 200 + tab_id = next(iter(session['codenames'].keys())) + with app.session_transaction() as sess: + sess['codename'] = codename + sess['logged_in'] = logged_in + resp = app.post(url_for('main.create'), data={'tab_id': tab_id}, follow_redirects=True) + assert resp.status_code == 200 + assert session['codename'] == codename + text = resp.data.decode('utf-8') + assert "You are already logged in." in text + assert "Submit Files" in text def test_lookup(source_app): @@ -243,7 +251,10 @@ def test_login_and_logout(source_app): assert 'logged_in' not in session assert 'codename' not in session text = resp.data.decode('utf-8') - assert 'Thank you for exiting your session!' in text + + # This is part of the logout page message instructing users + # to click the 'New Identity' icon + assert 'This will clear your Tor browser activity data' in text def test_user_must_log_in_for_protected_views(source_app): @@ -341,11 +352,9 @@ def test_submit_empty_message(source_app): def test_submit_big_message(source_app): - ''' - When the message is larger than 512KB it's written to disk instead of - just residing in memory. Make sure the different return type of - SecureTemporaryFile is handled as well as BytesIO. - ''' + """ + Test the message size limit. + """ with source_app.test_client() as app: new_codename(app, session) _dummy_submission(app) @@ -355,7 +364,7 @@ def test_submit_big_message(source_app): follow_redirects=True) assert resp.status_code == 200 text = resp.data.decode('utf-8') - assert "Thanks! We received your message" in text + assert "Message text too long." in text def test_submit_file(source_app): @@ -542,7 +551,7 @@ def test_why_journalist_key(source_app): resp = app.get(url_for('info.why_download_journalist_pubkey')) assert resp.status_code == 200 text = resp.data.decode('utf-8') - assert "Why download the journalist's public key?" in text + assert "Why download the team's public key?" 
in text def test_metadata_route(config, source_app): @@ -558,6 +567,32 @@ def test_metadata_route(config, source_app): assert resp.json.get('server_os') == '16.04' assert resp.json.get('supported_languages') ==\ config.SUPPORTED_LOCALES + assert resp.json.get('v2_source_url') is None + assert resp.json.get('v3_source_url') is None + + +def test_metadata_v2_url(config, source_app): + onion_test_url = "abcdabcdabcdabcd.onion" + with patch.object(source_app_api, "get_sourcev2_url") as mocked_v2_url: + mocked_v2_url.return_value = (onion_test_url) + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert resp.json.get('v2_source_url') == onion_test_url + assert resp.json.get('v3_source_url') is None + + +def test_metadata_v3_url(config, source_app): + onion_test_url = "abcdefghabcdefghabcdefghabcdefghabcdefghabcdefghabcdefgh.onion" + with patch.object(source_app_api, "get_sourcev3_url") as mocked_v3_url: + mocked_v3_url.return_value = (onion_test_url) + with source_app.test_client() as app: + resp = app.get(url_for('api.metadata')) + assert resp.status_code == 200 + assert resp.headers.get('Content-Type') == 'application/json' + assert resp.json.get('v2_source_url') is None + assert resp.json.get('v3_source_url') == onion_test_url def test_login_with_overly_long_codename(source_app): @@ -672,7 +707,7 @@ def test_source_session_expiration(config, source_app): assert not session text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_source_session_expiration_create(config, source_app): @@ -697,7 +732,7 @@ def test_source_session_expiration_create(config, source_app): assert not session text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_csrf_error_page(config, source_app): @@ -709,7 +744,7 @@ def test_csrf_error_page(config, source_app): resp = app.post(url_for('main.create'), follow_redirects=True) text = resp.data.decode('utf-8') - assert 'Your session timed out due to inactivity' in text + assert 'You were logged out due to inactivity' in text def test_source_can_only_delete_own_replies(source_app): diff --git a/securedrop/tests/test_source_utils.py b/securedrop/tests/test_source_utils.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/test_source_utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +import os + +from source_app.utils import check_url_file + + +def test_check_url_file(config): + + assert check_url_file("nosuchfile", "whatever") is None + + try: + def write_url_file(path, content): + url_file = open(path, "w") + url_file.write("{}\n".format(content)) + + url_path = "test_source_url" + + onion_test_url = "abcdabcdabcdabcd.onion" + write_url_file(url_path, onion_test_url) + assert check_url_file(url_path, r"^[a-z0-9]{16}\.onion$") == onion_test_url + + onion_test_url = "abcdefghabcdefghabcdefghabcdefghabcdefghabcdefghabcdefgh.onion" + write_url_file(url_path, onion_test_url) + assert check_url_file(url_path, r"^[a-z0-9]{56}\.onion$") == onion_test_url + + write_url_file(url_path, "NO.onion") + assert check_url_file(url_path, r"^[a-z0-9]{56}\.onion$") is None + finally: + if os.path.exists(url_path): + os.unlink(url_path) diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py --- 
a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -39,6 +39,17 @@ def init_journalist(first_name=None, last_name=None, is_admin=False): return user, user_pw +def delete_journalist(journalist): + """Deletes a journalist from the database. + + :param models.Journalist journalist: The journalist to delete + + :returns: None + """ + db.session.delete(journalist) + db.session.commit() + + def reply(journalist, source, num_replies): """Generates and submits *num_replies* replies to *source* from *journalist*. Returns reply objects as a list. @@ -60,7 +71,7 @@ def reply(journalist, source, num_replies): source.journalist_filename) current_app.crypto_util.encrypt( str(os.urandom(1)), - [current_app.crypto_util.getkey(source.filesystem_id), + [current_app.crypto_util.get_fingerprint(source.filesystem_id), config.JOURNALIST_KEY], current_app.storage.path(source.filesystem_id, fname)) @@ -173,6 +184,6 @@ def new_codename(client, session): """Helper function to go through the "generate codename" flow. """ client.get('/generate') - codename = session['codename'] - client.post('/create') + tab_id, codename = next(iter(session['codenames'].items())) + client.post('/create', data={'tab_id': tab_id}) return codename
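
The test-helper changes above all follow one pattern: `/generate` now stores candidate codenames in a per-tab dict under `session['codenames']`, and `/create` must be told which tab's codename to promote. Below is a minimal sketch of that flow, assuming a plain dict in place of the Flask session and a hypothetical `make_codename` generator; it is an illustration of the session shape the tests assert, not the real view code.

```python
import uuid


def handle_generate(session, make_codename):
    """GET /generate: stash a candidate codename under a fresh per-tab id."""
    tab_id = uuid.uuid4().hex
    session.setdefault('codenames', {})[tab_id] = make_codename()
    return tab_id


def handle_create(session, tab_id):
    """POST /create: promote the chosen tab's codename and clear the rest."""
    codename = session['codenames'][tab_id]
    session['codename'] = codename
    session['logged_in'] = True
    del session['codenames']  # matches the tests' `'codenames' not in session`
    return codename


session = {}
tab_id = handle_generate(session, lambda: "panda daisy")
assert handle_create(session, tab_id) == "panda daisy"
assert 'codenames' not in session
```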
deleted journalist causes 500 on GET /replies API endpoint ## Description Deleted journalist causes GET /replies endpoint to 500 if there are any replies from that deleted journalist. ## Steps to Reproduce 0. Submit a document or message as a source. 1. Reply to the source from journalist account A. 2. Delete the account of journalist account A (from another admin account). 3. `GET /api/v1/replies` ## Expected Behavior 200 OK ## Actual Behavior ``` 172.17.0.1 - - [31/Mar/2020 20:57:10] "GET /api/v1/replies HTTP/1.1" 500 - Traceback (most recent call last): File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2309, in __call__ return self.wsgi_app(environ, start_response) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2295, in wsgi_app response = self.handle_exception(e) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1741, in handle_exception reraise(exc_type, exc_value, tb) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise raise value File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2292, in wsgi_app response = self.full_dispatch_request() File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1815, in full_dispatch_request rv = self.handle_user_exception(e) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1718, in handle_user_exception reraise(exc_type, exc_value, tb) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise raise value File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1813, in full_dispatch_request rv = self.dispatch_request() File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1799, in dispatch_request return self.view_functions[rule.endpoint](**req.view_args) File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 48, in decorated_function return f(*args, **kwargs) File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 310, in get_all_replies {'replies': [reply.to_json() for reply in replies]}), 200 File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/journalist_app/api.py", line 310, in <listcomp> {'replies': [reply.to_json() for reply in replies]}), 200 File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc2/securedrop/securedrop/models.py", line 289, in to_json 'journalist_username': self.journalist.username, AttributeError: 'NoneType' object has no attribute 'username' ``` ## Comments We should handle the case where the `journalist` is `None` [here](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/models.py#L289-L292) It would be wise to also add test data to `create-dev-data.py` for the deleted journalist scenario (since that is used for development of securedrop-client). Update kernels to 4.14.169 or later ## Description Updating will provide improved cache handling to mitigate CVE-2020-0549 (https://cacheoutattack.com/) A microcode update will be provided by Intel at a later date. In order to exploit this vulnerability, an attacker requires: 1. Local code execution on the server 2.
A CPU that supports Intel TSX We do disable TSX in the kernel config: https://github.com/freedomofpress/ansible-role-grsecurity-build/blob/master/files/config-securedrop-4.14#L626, so this vulnerability may not even be exploitable by an attacker. Since the likelihood of exploitation is extremely low, I propose we update kernels as part of the 1.3.0 release.
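
For the first issue above (the `NoneType` traceback from `reply.to_json()`), here is a minimal sketch of the null-safe serialization being asked for. The `"deleted"` placeholder values mirror the assertions in `test_reply_of_deleted_journalist` in the earlier test diff; the field set is trimmed for illustration and everything else about the real `Reply` model is an assumption.

```python
from types import SimpleNamespace


def reply_to_json(reply):
    """Serialize a reply, tolerating a journalist removed from the database."""
    if reply.journalist is None:
        journalist_username = journalist_uuid = "deleted"
        first_name = last_name = ""
    else:
        journalist_username = reply.journalist.username
        journalist_uuid = reply.journalist.uuid
        first_name = reply.journalist.first_name or ""
        last_name = reply.journalist.last_name or ""
    return {
        'uuid': reply.uuid,
        'journalist_username': journalist_username,
        'journalist_uuid': journalist_uuid,
        'journalist_first_name': first_name,
        'journalist_last_name': last_name,
    }


# A reply whose journalist row no longer exists serializes without raising.
orphaned = SimpleNamespace(uuid="abc123", journalist=None)
assert reply_to_json(orphaned)['journalist_username'] == "deleted"
```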
2020-05-14T01:07:29Z
[]
[]
freedomofpress/securedrop
5279
freedomofpress__securedrop-5279
[ "5145" ]
1346be8c5617091dcff4757a389db6e5ab807c20
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -12,7 +12,8 @@ from db import db from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, WrongPasswordException, FirstOrLastNameError, LoginThrottledException, - BadTokenException, SourceStar, PasswordError, Submission, RevokedToken) + BadTokenException, SourceStar, PasswordError, Submission, RevokedToken, + InvalidPasswordLength) from store import add_checksum_for_file import typing @@ -77,7 +78,8 @@ def validate_user(username, password, token, error_message=None): except (InvalidUsernameException, BadTokenException, WrongPasswordException, - LoginThrottledException) as e: + LoginThrottledException, + InvalidPasswordLength) as e: current_app.logger.error("Login for '{}' failed: {}".format( username, e)) if not error_message:
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -614,6 +614,21 @@ def test_max_password_length(): password=overly_long_password) +def test_login_password_too_long(journalist_app, test_journo, mocker): + mocked_error_logger = mocker.patch('journalist.app.logger.error') + with journalist_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(username=test_journo['username'], + password='a' * (Journalist.MAX_PASSWORD_LEN + 1), + token=TOTP(test_journo['otp_secret']).now())) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + mocked_error_logger.assert_called_once_with( + "Login for '{}' failed: Password too long (len={})".format( + test_journo['username'], Journalist.MAX_PASSWORD_LEN + 1)) + + def test_min_password_length(): """Creating a Journalist with a password that is smaller than the minimum password length should raise an exception. This uses the
journalist login with long passphrase causes internal server error ## Description When logging in to the journalist interface, entering a passphrase that's too long raises `models.InvalidPasswordLength`, which is not caught properly, so ends up handled at line 97 of `/var/www/securedrop/journalist_app/__init__.py`, in `_handle_http_exception`, where `AttributeError: 'InvalidPasswordLength' object has no attribute 'code'` results in the plain Apache Internal Server Error page being delivered to the visitor. ## Steps to Reproduce This can be reproduced with the dev server. - `make dev` - visit the journalist interface at http://localhost:8081 - try to log in with a passphrase longer than 128 characters ## Expected Behavior The server should return a flashed error indicating the login failed. ## Actual Behavior The improperly handled exception lands the visitor at a dead-end Internal Server Error page.
@rmol Will look into this. @rmol Wrapping the `raise InvalidPasswordLength(password)` in a `try/except` block like: ``` try: raise InvalidPasswordLength(passphrase) except InvalidPasswordLength as error: str(error) ``` would work I think. Update: the exception just needed to be handled in `journalist_app/utils.py`
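
A minimal, self-contained sketch of the handling the update describes, mirroring the shipped patch's approach of adding `InvalidPasswordLength` to the exceptions caught around the login call. The exception class and logger here are illustrative stand-ins, not the real `models` import.

```python
import logging


class InvalidPasswordLength(Exception):  # stand-in for models.InvalidPasswordLength
    pass


logger = logging.getLogger("journalist")


def validate_user(username, password, token, do_login):
    """Treat an over-long passphrase as an ordinary failed login."""
    try:
        return do_login(username, password, token)
    except InvalidPasswordLength as e:  # plus the other login exceptions
        logger.error("Login for '%s' failed: %s", username, e)
        return None


def strict_login(username, password, token):
    if len(password) > 128:
        raise InvalidPasswordLength("Password too long (len={})".format(len(password)))
    return username


# The failed login is logged and reported instead of escaping as a 500.
assert validate_user("journo", "a" * 129, "123456", strict_login) is None
```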
2020-05-24T18:55:46Z
[]
[]
freedomofpress/securedrop
5284
freedomofpress__securedrop-5284
[ "5232" ]
ebc193ca4c42e3ecf930077653e0404327a7d826
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py --- a/securedrop/journalist_app/forms.py +++ b/securedrop/journalist_app/forms.py @@ -37,10 +37,16 @@ def name_length_validation(form, field): .format(max_chars=Journalist.MAX_NAME_LEN))) +def check_invalid_usernames(form, field): + if field.data in Journalist.INVALID_USERNAMES: + raise ValidationError(gettext( + "This username is invalid because it is reserved for internal use by the software.")) + + class NewUserForm(FlaskForm): username = StringField('username', validators=[ InputRequired(message=gettext('This field is required.')), - minimum_length_validation + minimum_length_validation, check_invalid_usernames ]) first_name = StringField('first_name', validators=[name_length_validation, Optional()]) last_name = StringField('last_name', validators=[name_length_validation, Optional()]) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -414,6 +414,7 @@ class Journalist(db.Model): MIN_USERNAME_LEN = 3 MIN_NAME_LEN = 0 MAX_NAME_LEN = 100 + INVALID_USERNAMES = ['deleted'] def __init__(self, username: str, @@ -641,12 +642,18 @@ def login(cls, username: str, password: str, token: str) -> 'Journalist': + try: user = Journalist.query.filter_by(username=username).one() except NoResultFound: raise InvalidUsernameException( "invalid username '{}'".format(username)) + if user.username in Journalist.INVALID_USERNAMES and \ + user.uuid in Journalist.INVALID_USERNAMES: + raise InvalidUsernameException( + "Invalid username") + if LOGIN_HARDENING: cls.throttle_login(user)
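
A quick demonstration of how the `check_invalid_usernames` validator added in the patch above behaves. Plain `wtforms.Form` is used instead of `FlaskForm` so the snippet runs without an application context, and the reserved list is stubbed in from the model; this is a sketch of the behavior, not the production form.

```python
from wtforms import Form, StringField, ValidationError

INVALID_USERNAMES = ['deleted']  # mirrors Journalist.INVALID_USERNAMES


def check_invalid_usernames(form, field):
    if field.data in INVALID_USERNAMES:
        raise ValidationError(
            "This username is invalid because it is reserved "
            "for internal use by the software.")


class NewUserForm(Form):
    username = StringField('username', validators=[check_invalid_usernames])


# Attempting to register the reserved name fails validation.
form = NewUserForm(username='deleted')
assert not form.validate()
assert "reserved for internal use" in form.username.errors[0]
```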
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -322,6 +322,28 @@ def _add_user(self, username, first_name="", last_name="", is_admin=False, hotp= self.wait_for(lambda: self.driver.find_element_by_id("check-token")) + def _admin_adds_a_user_with_invalid_username(self): + self.safe_click_by_id("add-user") + + self.wait_for(lambda: self.driver.find_element_by_id("username")) + + if not hasattr(self, "accept_languages"): + # The add user page has a form with an "ADD USER" button + btns = self.driver.find_elements_by_tag_name("button") + assert "ADD USER" in [el.text for el in btns] + + invalid_username = 'deleted' + + self.safe_send_keys_by_css_selector('input[name="username"]', invalid_username) + + self.safe_click_by_css_selector("button[type=submit]") + + self.wait_for(lambda: self.driver.find_element_by_css_selector(".form-validation-error")) + + error_msg = self.driver.find_element_by_css_selector(".form-validation-error") + assert "This username is invalid because it is reserved for internal use " \ + "by the software." in error_msg.text + def _admin_adds_a_user(self, is_admin=False, new_username=""): self.safe_click_by_id("add-user") diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py --- a/securedrop/tests/functional/test_admin_interface.py +++ b/securedrop/tests/functional/test_admin_interface.py @@ -56,6 +56,12 @@ def test_ossec_alert_button(self): self._admin_visits_system_config_page() self._admin_can_send_test_alert() + def test_admin_adds_user_with_invalid_username(self): + self._admin_logs_in() + self._admin_visits_admin_interface() + # Add an user with invalid username + self._admin_adds_a_user_with_invalid_username() + def test_admin_adds_admin_user(self): self._admin_logs_in() self._admin_visits_admin_interface() diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -26,7 +26,7 @@ from db import db from models import (InvalidPasswordLength, InstanceConfig, Journalist, Reply, Source, - Submission) + InvalidUsernameException, Submission) from .utils.instrument import InstrumentedApp # Smugly seed the RNG for deterministic testing @@ -1076,6 +1076,66 @@ def test_admin_add_user(journalist_app, test_admin): uid=new_user.id)) +def test_admin_add_user_with_invalid_username(journalist_app, test_admin): + username = 'deleted' + + with journalist_app.test_client() as app: + _login_user(app, test_admin['username'], test_admin['password'], test_admin['otp_secret']) + + resp = app.post(url_for('admin.add_user'), + data=dict(username=username, + first_name='', + last_name='', + password=VALID_PASSWORD, + is_admin=None)) + + assert "This username is invalid because it is reserved for internal use by the software." 
\ + in resp.data.decode('utf-8') + + +def test_deleted_user_cannot_login(journalist_app): + username = 'deleted' + uuid = 'deleted' + + # Create a user with username and uuid as deleted + with journalist_app.app_context(): + user, password = utils.db_helper.init_journalist(is_admin=False) + otp_secret = user.otp_secret + user.username = username + user.uuid = uuid + db.session.add(user) + db.session.commit() + + # Verify that deleted user is not able to login + with journalist_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(username=username, + password=password, + token=otp_secret)) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + + +def test_deleted_user_cannot_login_exception(journalist_app): + username = 'deleted' + uuid = 'deleted' + + # Create a user with username and uuid as deleted + with journalist_app.app_context(): + user, password = utils.db_helper.init_journalist(is_admin=False) + otp_secret = user.otp_secret + user.username = username + user.uuid = uuid + db.session.add(user) + db.session.commit() + + with pytest.raises(InvalidUsernameException): + Journalist.login(username, + password, + TOTP(otp_secret).now()) + + def test_admin_add_user_without_username(journalist_app, test_admin): with journalist_app.test_client() as app: _login_user(app, test_admin['username'], test_admin['password'],
We should not allow journalist username `deleted` Because in https://github.com/freedomofpress/securedrop/pull/5178 we are now marking `deleted` as the name/uuid of any journalist account which is deleted from the system, we should not allow the term `deleted` as a journalist name (via the web interface). I think this will help reduce confusion in the future.
Good point, we should have a disallowed list of usernames and disallow the creation of new accounts with those usernames. Of course there can be existing users that have this username, so we can keep that in mind for client development, but we can distinguish legitimate users with the username `deleted` from the case where the journalist was deleted by the fact that in the latter case the uuid will also be `deleted`. This'll involve a string change so we can do it in 1.4.0 @kushaldas @redshiftzero I would like to work on this. @prateekj117 Go for it! Please find us on https://gitter.im/freedomofpress/securedrop if we can help you get started on this -- you can also come join our standups Monday-Thursday at 4PM UTC here https://meet.google.com/ekb-kkhf-mrk (they're announced on Gitter). @eloquence Sure. @eloquence I am confused why `SecureDrop` doesn't use a `deleted_at` column strategy. This way, we also won't face issues like these. Probably original design decisions like this were motivated by wanting to ensure a minimum amount of historical data on the system in case of theft/seizure. Having records of when accounts were added/deleted seems pretty innocuous, but you never know. @zenmonkeykstop @redshiftzero Hmm, I agree. Though, why don't we just have a `deleted` column with a boolean value? This saves us from having extra information in the database. Ok, it must be that way, because even after deletion a person could be personally identified if we kept a separate `deleted` bool column.
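
A tiny sketch of the disambiguation described in the hints: a legitimate account may happen to be named `deleted`, but only the placeholder left behind by account deletion has `uuid == "deleted"` as well. The `User` tuple is a stand-in for the real `Journalist` model.

```python
from collections import namedtuple

User = namedtuple("User", ["username", "uuid"])  # stand-in for Journalist


def is_deletion_placeholder(user):
    """True only for the tombstone left behind when an account is deleted."""
    return user.username == "deleted" and user.uuid == "deleted"


assert is_deletion_placeholder(User("deleted", "deleted"))
assert not is_deletion_placeholder(User("deleted", "a-real-uuid"))
```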
2020-05-30T16:36:22Z
[]
[]
freedomofpress/securedrop
5,318
freedomofpress__securedrop-5318
[ "4216" ]
9c15a4e82439ab0d9e653850222290cc30001f08
diff --git a/admin/bootstrap.py b/admin/bootstrap.py --- a/admin/bootstrap.py +++ b/admin/bootstrap.py @@ -197,7 +197,18 @@ def envsetup(args, virtualenv_dir=VENV_DIR): else: sdlog.info("Virtualenv already exists, not creating") - install_pip_dependencies(args) + if args.t: + install_pip_dependencies(args, pip_install_cmd=[ + os.path.join(VENV_DIR, 'bin', 'pip3'), + 'install', + '--no-deps', + '-r', os.path.join(DIR, 'requirements-testinfra.txt'), + '--require-hashes', + '-U', '--upgrade-strategy', 'only-if-needed', ], + desc="dependencies with verification support") + else: + install_pip_dependencies(args) + if os.path.exists(os.path.join(DIR, 'setup.py')): install_pip_self(args) @@ -226,33 +237,36 @@ def install_pip_dependencies(args, pip_install_cmd=[ '-r', os.path.join(DIR, 'requirements.txt'), '--require-hashes', # Make sure to upgrade packages only if necessary. - '-U', '--upgrade-strategy', 'only-if-needed', -]): + '-U', '--upgrade-strategy', 'only-if-needed', ], + desc="Python dependencies" +): """ Install Python dependencies via pip into virtualenv. """ - sdlog.info("Checking Python dependencies for securedrop-admin") + sdlog.info("Checking {} for securedrop-admin".format(desc)) try: pip_output = subprocess.check_output(maybe_torify() + pip_install_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: sdlog.debug(e.output) - sdlog.error(("Failed to install pip dependencies. Check network" - " connection and try again.")) + sdlog.error(("Failed to install {}. Check network" + " connection and try again.".format(desc))) raise sdlog.debug(pip_output) if "Successfully installed" in str(pip_output): - sdlog.info("Python dependencies for securedrop-admin upgraded") + sdlog.info("{} for securedrop-admin upgraded".format(desc)) else: - sdlog.info("Python dependencies for securedrop-admin are up-to-date") + sdlog.info("{} for securedrop-admin are up-to-date".format(desc)) def parse_argv(argv): parser = argparse.ArgumentParser() parser.add_argument('-v', action='store_true', default=False, help="Increase verbosity on output") + parser.add_argument('-t', action='store_true', default=False, + help="Install additional test dependencies") parser.set_defaults(func=envsetup) subparsers = parser.add_subparsers() diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -753,6 +753,15 @@ def install_securedrop(args): cwd=args.ansible_path) +def verify_install(args): + """Run configuration tests against SecureDrop servers""" + + sdlog.info("Running configuration tests: ") + testinfra_cmd = ["./devops/scripts/run_prod_testinfra"] + return subprocess.check_call(testinfra_cmd, + cwd=os.getcwd()) + + def backup_securedrop(args): """Perform backup of the SecureDrop Application Server. Creates a tarball of submissions and server config, and fetches @@ -1050,6 +1059,10 @@ class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter, help=reset_admin_access.__doc__) parse_reset_ssh.set_defaults(func=reset_admin_access) + parse_verify = subparsers.add_parser('verify', + help=verify_install.__doc__) + parse_verify.set_defaults(func=verify_install) + args = parser.parse_args(argv) if getattr(args, 'func', None) is None: print('Please specify an operation.\n') diff --git a/molecule/testinfra/staging/conftest.py b/molecule/testinfra/conftest.py similarity index 100% rename from molecule/testinfra/staging/conftest.py rename to molecule/testinfra/conftest.py
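
The `verify` subcommand added above follows the module's existing argparse dispatch convention: each subcommand registers a handler via `set_defaults(func=...)` and the caller invokes `args.func(args)`. Below is a runnable sketch of that convention, with the subprocess call replaced by a print for illustration.

```python
import argparse


def verify_install(args):
    """Run configuration tests against SecureDrop servers"""
    print("would run ./devops/scripts/run_prod_testinfra")


parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
parse_verify = subparsers.add_parser('verify', help=verify_install.__doc__)
parse_verify.set_defaults(func=verify_install)

# Dispatch exactly as the admin CLI does after parsing argv.
args = parser.parse_args(['verify'])
args.func(args)  # what `./securedrop-admin verify` hands off to
```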
diff --git a/molecule/testinfra/staging/app-code/test_haveged.py b/molecule/testinfra/app-code/test_haveged.py similarity index 92% rename from molecule/testinfra/staging/app-code/test_haveged.py rename to molecule/testinfra/app-code/test_haveged.py --- a/molecule/testinfra/staging/app-code/test_haveged.py +++ b/molecule/testinfra/app-code/test_haveged.py @@ -1,4 +1,7 @@ -testinfra_hosts = ["app-staging"] +import pytest + +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] def test_haveged_config(host): diff --git a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py b/molecule/testinfra/app-code/test_securedrop_app_code.py similarity index 97% rename from molecule/testinfra/staging/app-code/test_securedrop_app_code.py rename to molecule/testinfra/app-code/test_securedrop_app_code.py --- a/molecule/testinfra/staging/app-code/test_securedrop_app_code.py +++ b/molecule/testinfra/app-code/test_securedrop_app_code.py @@ -1,8 +1,8 @@ import pytest -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] def test_apache_default_docroot_is_absent(host): @@ -38,6 +38,7 @@ def test_securedrop_application_apt_dependencies(host, package): assert host.package(package).is_installed [email protected]_in_prod def test_securedrop_application_test_locale(host): """ Ensure both SecureDrop DEFAULT_LOCALE and SUPPORTED_LOCALES are present. @@ -52,6 +53,7 @@ def test_securedrop_application_test_locale(host): assert "\nSUPPORTED_LOCALES = ['el', 'ar', 'en_US']\n" in securedrop_config.content_string [email protected]_in_prod def test_securedrop_application_test_journalist_key(host): """ Ensure the SecureDrop Application GPG public key file is present. 
diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py b/molecule/testinfra/app-code/test_securedrop_rqrequeue.py similarity index 95% rename from molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py rename to molecule/testinfra/app-code/test_securedrop_rqrequeue.py --- a/molecule/testinfra/staging/app-code/test_securedrop_rqrequeue.py +++ b/molecule/testinfra/app-code/test_securedrop_rqrequeue.py @@ -1,7 +1,7 @@ import pytest - -testinfra_hosts = ["app-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] def test_securedrop_rqrequeue_service(host): diff --git a/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py b/molecule/testinfra/app-code/test_securedrop_rqworker.py similarity index 95% rename from molecule/testinfra/staging/app-code/test_securedrop_rqworker.py rename to molecule/testinfra/app-code/test_securedrop_rqworker.py --- a/molecule/testinfra/staging/app-code/test_securedrop_rqworker.py +++ b/molecule/testinfra/app-code/test_securedrop_rqworker.py @@ -1,7 +1,7 @@ import pytest - -testinfra_hosts = ["app-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] def test_securedrop_rqworker_service(host): diff --git a/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py b/molecule/testinfra/app-code/test_securedrop_shredder_configuration.py similarity index 95% rename from molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py rename to molecule/testinfra/app-code/test_securedrop_shredder_configuration.py --- a/molecule/testinfra/staging/app-code/test_securedrop_shredder_configuration.py +++ b/molecule/testinfra/app-code/test_securedrop_shredder_configuration.py @@ -1,7 +1,7 @@ import pytest - -testinfra_hosts = ["app-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] def test_securedrop_shredder_service(host): diff --git a/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py b/molecule/testinfra/app-code/test_securedrop_source_deleter_configuration.py similarity index 95% rename from molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py rename to molecule/testinfra/app-code/test_securedrop_source_deleter_configuration.py --- a/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py +++ b/molecule/testinfra/app-code/test_securedrop_source_deleter_configuration.py @@ -1,7 +1,7 @@ import pytest - -testinfra_hosts = ["app-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] def test_securedrop_source_deleter_service(host): diff --git a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py b/molecule/testinfra/app/apache/test_apache_journalist_interface.py similarity index 99% rename from molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py rename to molecule/testinfra/app/apache/test_apache_journalist_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_journalist_interface.py +++ b/molecule/testinfra/app/apache/test_apache_journalist_interface.py @@ -2,8 +2,8 @@ import re -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] # Setting once so it can be reused in multiple tests. 
wanted_apache_headers = [ @@ -28,6 +28,7 @@ 'Header set Referrer-Policy "no-referrer"', ] + # Test is not DRY; haven't figured out how to parametrize on # multiple inputs, so explicitly redeclaring test logic. @pytest.mark.parametrize("header", wanted_apache_headers) diff --git a/molecule/testinfra/staging/app/apache/test_apache_service.py b/molecule/testinfra/app/apache/test_apache_service.py similarity index 97% rename from molecule/testinfra/staging/app/apache/test_apache_service.py rename to molecule/testinfra/app/apache/test_apache_service.py --- a/molecule/testinfra/staging/app/apache/test_apache_service.py +++ b/molecule/testinfra/app/apache/test_apache_service.py @@ -1,8 +1,8 @@ import pytest -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] @pytest.mark.parametrize("apache_site", [ diff --git a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py b/molecule/testinfra/app/apache/test_apache_source_interface.py similarity index 97% rename from molecule/testinfra/staging/app/apache/test_apache_source_interface.py rename to molecule/testinfra/app/apache/test_apache_source_interface.py --- a/molecule/testinfra/staging/app/apache/test_apache_source_interface.py +++ b/molecule/testinfra/app/apache/test_apache_source_interface.py @@ -1,8 +1,8 @@ import pytest import re -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] @pytest.mark.parametrize("header", securedrop_test_vars.wanted_apache_headers) diff --git a/molecule/testinfra/staging/app/apache/test_apache_system_config.py b/molecule/testinfra/app/apache/test_apache_system_config.py similarity index 94% rename from molecule/testinfra/staging/app/apache/test_apache_system_config.py rename to molecule/testinfra/app/apache/test_apache_system_config.py --- a/molecule/testinfra/staging/app/apache/test_apache_system_config.py +++ b/molecule/testinfra/app/apache/test_apache_system_config.py @@ -1,8 +1,8 @@ import pytest import re -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] @pytest.mark.parametrize("package", [ @@ -158,9 +158,11 @@ def test_apache_logfiles_no_extras(host): `test_apache_logfiles_present` config test. Here, we confirm that the total number of Apache logfiles exactly matches the number permitted on the Application Server, whether staging or prod. + Long-running instances may have rotated and gzipped logfiles, so this + test should only look for files ending in '.log'. 
""" # We need elevated privileges to read files inside /var/log/apache2 with host.sudo(): - c = host.run("find /var/log/apache2 -mindepth 1 | wc -l") + c = host.run("find /var/log/apache2 -mindepth 1 -name '*.log' | wc -l") assert int(c.stdout) == \ len(securedrop_test_vars.allowed_apache_logfiles) diff --git a/molecule/testinfra/staging/app/test_app_network.py b/molecule/testinfra/app/test_app_network.py similarity index 95% rename from molecule/testinfra/staging/app/test_app_network.py rename to molecule/testinfra/app/test_app_network.py --- a/molecule/testinfra/staging/app/test_app_network.py +++ b/molecule/testinfra/app/test_app_network.py @@ -5,10 +5,11 @@ from jinja2 import Template -testinfra_hosts = ["app-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] [email protected]_in_prod def test_app_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison diff --git a/molecule/testinfra/staging/app/test_apparmor.py b/molecule/testinfra/app/test_apparmor.py similarity index 99% rename from molecule/testinfra/staging/app/test_apparmor.py rename to molecule/testinfra/app/test_apparmor.py --- a/molecule/testinfra/staging/app/test_apparmor.py +++ b/molecule/testinfra/app/test_apparmor.py @@ -1,8 +1,8 @@ import pytest -testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] @pytest.mark.parametrize('pkg', ['apparmor', 'apparmor-utils']) diff --git a/molecule/testinfra/staging/app/test_appenv.py b/molecule/testinfra/app/test_appenv.py similarity index 96% rename from molecule/testinfra/staging/app/test_appenv.py rename to molecule/testinfra/app/test_appenv.py --- a/molecule/testinfra/staging/app/test_appenv.py +++ b/molecule/testinfra/app/test_appenv.py @@ -1,8 +1,8 @@ import os.path import pytest -testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] @pytest.mark.parametrize('exp_pip_pkg', sdvars.pip_deps) @@ -12,6 +12,7 @@ def test_app_pip_deps(host, exp_pip_pkg): assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version'] [email protected]_in_prod def test_app_wsgi(host): """ ensure logging is enabled for source interface in staging """ f = host.file("/var/www/source.wsgi") @@ -63,6 +64,7 @@ def test_supervisor_not_installed(host): assert host.package("supervisor").is_installed is False [email protected]_in_prod def test_gpg_key_in_keyring(host): """ ensure test gpg key is present in app keyring """ with host.sudo(sdvars.securedrop_user): diff --git a/molecule/testinfra/staging/app/test_ossec_agent.py b/molecule/testinfra/app/test_ossec_agent.py similarity index 97% rename from molecule/testinfra/staging/app/test_ossec_agent.py rename to molecule/testinfra/app/test_ossec_agent.py --- a/molecule/testinfra/staging/app/test_ossec_agent.py +++ b/molecule/testinfra/app/test_ossec_agent.py @@ -3,7 +3,7 @@ import pytest sdvars = pytest.securedrop_test_vars -testinfra_hosts = ["app", "app-staging"] +testinfra_hosts = [sdvars.app_hostname] def test_hosts_files(host): diff --git a/molecule/testinfra/staging/app/test_paxctld.py b/molecule/testinfra/app/test_paxctld.py similarity index 94% rename from molecule/testinfra/staging/app/test_paxctld.py rename to molecule/testinfra/app/test_paxctld.py --- a/molecule/testinfra/staging/app/test_paxctld.py +++ b/molecule/testinfra/app/test_paxctld.py @@ -2,8 +2,8 @@ import re -testinfra_hosts = ["app-staging"] securedrop_test_vars = 
pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.app_hostname] def test_paxctld_installed(host): diff --git a/molecule/testinfra/staging/app/test_tor_config.py b/molecule/testinfra/app/test_tor_config.py similarity index 96% rename from molecule/testinfra/staging/app/test_tor_config.py rename to molecule/testinfra/app/test_tor_config.py --- a/molecule/testinfra/staging/app/test_tor_config.py +++ b/molecule/testinfra/app/test_tor_config.py @@ -1,8 +1,8 @@ import pytest import re -testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] @pytest.mark.parametrize('package', [ @@ -61,6 +61,7 @@ def test_tor_torrc_sandbox(host): assert not f.contains("^.*Sandbox.*$") [email protected]_in_prod def test_tor_v2_onion_url_readable_by_app(host): v2_url_filepath = "/var/lib/securedrop/source_v2_url" with host.sudo(): @@ -71,6 +72,7 @@ def test_tor_v2_onion_url_readable_by_app(host): assert re.search(r"^[a-z0-9]{16}\.onion$", f.content_string) [email protected]_in_prod def test_tor_v3_onion_url_readable_by_app(host): v3_url_filepath = "/var/lib/securedrop/source_v3_url" with host.sudo(): diff --git a/molecule/testinfra/staging/app/test_tor_hidden_services.py b/molecule/testinfra/app/test_tor_hidden_services.py similarity index 95% rename from molecule/testinfra/staging/app/test_tor_hidden_services.py rename to molecule/testinfra/app/test_tor_hidden_services.py --- a/molecule/testinfra/staging/app/test_tor_hidden_services.py +++ b/molecule/testinfra/app/test_tor_hidden_services.py @@ -2,10 +2,12 @@ import re -testinfra_hosts = ["app-staging"] sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname] - +# Prod Tor services may have unexpected configs +# TODO: read from admin workstation site-specific file if available [email protected]_in_prod @pytest.mark.parametrize('tor_service', sdvars.tor_services) def test_tor_service_directories(host, tor_service): """ @@ -19,6 +21,7 @@ def test_tor_service_directories(host, tor_service): assert f.group == "debian-tor" [email protected]_in_prod @pytest.mark.parametrize('tor_service', sdvars.tor_services) def test_tor_service_hostnames(host, tor_service): """ @@ -62,6 +65,7 @@ def test_tor_service_hostnames(host, tor_service): assert re.search("^{}$".format(ths_hostname_regex_v3), f.content_string) [email protected]_in_prod @pytest.mark.parametrize('tor_service', sdvars.tor_services) def test_tor_services_config(host, tor_service): """ diff --git a/molecule/testinfra/staging/common/test_cron_apt.py b/molecule/testinfra/common/test_cron_apt.py similarity index 98% rename from molecule/testinfra/staging/common/test_cron_apt.py rename to molecule/testinfra/common/test_cron_apt.py --- a/molecule/testinfra/staging/common/test_cron_apt.py +++ b/molecule/testinfra/common/test_cron_apt.py @@ -3,6 +3,7 @@ test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] @pytest.mark.parametrize('dependency', [ diff --git a/molecule/testinfra/staging/common/test_fpf_apt_repo.py b/molecule/testinfra/common/test_fpf_apt_repo.py similarity index 97% rename from molecule/testinfra/staging/common/test_fpf_apt_repo.py rename to molecule/testinfra/common/test_fpf_apt_repo.py --- a/molecule/testinfra/staging/common/test_fpf_apt_repo.py +++ b/molecule/testinfra/common/test_fpf_apt_repo.py @@ -3,6 +3,7 @@ test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def 
test_fpf_apt_repo_present(host): diff --git a/molecule/testinfra/staging/common/test_grsecurity.py b/molecule/testinfra/common/test_grsecurity.py similarity index 97% rename from molecule/testinfra/staging/common/test_grsecurity.py rename to molecule/testinfra/common/test_grsecurity.py --- a/molecule/testinfra/staging/common/test_grsecurity.py +++ b/molecule/testinfra/common/test_grsecurity.py @@ -2,7 +2,9 @@ import re -KERNEL_VERSION = pytest.securedrop_test_vars.grsec_version +sdvars = pytest.securedrop_test_vars +KERNEL_VERSION = sdvars.grsec_version +testinfra_hosts = [sdvars.app_hostname, sdvars.monitor_hostname] def test_ssh_motd_disabled(host): @@ -88,6 +90,7 @@ def test_grsecurity_sysctl_options(host, sysctl_opt): assert host.sysctl(sysctl_opt[0]) == sysctl_opt[1] [email protected]_in_prod @pytest.mark.parametrize('paxtest_check', [ "Executable anonymous mapping", "Executable bss", @@ -122,6 +125,7 @@ def test_grsecurity_paxtest(host, paxtest_check): assert re.search(regex, c.stdout) [email protected]_in_prod def test_grub_pc_marked_manual(host): """ Ensure the `grub-pc` packaged is marked as manually installed. diff --git a/molecule/testinfra/staging/common/test_ip6tables.py b/molecule/testinfra/common/test_ip6tables.py similarity index 75% rename from molecule/testinfra/staging/common/test_ip6tables.py rename to molecule/testinfra/common/test_ip6tables.py --- a/molecule/testinfra/staging/common/test_ip6tables.py +++ b/molecule/testinfra/common/test_ip6tables.py @@ -1,3 +1,9 @@ +import pytest + +test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] + + def test_ip6tables_drop_everything(host): """ Ensure that all IPv6 packets are dropped by default. diff --git a/molecule/testinfra/staging/common/test_platform.py b/molecule/testinfra/common/test_platform.py similarity index 86% rename from molecule/testinfra/staging/common/test_platform.py rename to molecule/testinfra/common/test_platform.py --- a/molecule/testinfra/staging/common/test_platform.py +++ b/molecule/testinfra/common/test_platform.py @@ -1,3 +1,8 @@ +import pytest + +test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] + # We expect Ubuntu Xenial SUPPORTED_CODENAMES = ('xenial') SUPPORTED_RELEASES = ('16.04') diff --git a/molecule/testinfra/staging/common/test_release_upgrades.py b/molecule/testinfra/common/test_release_upgrades.py similarity index 86% rename from molecule/testinfra/staging/common/test_release_upgrades.py rename to molecule/testinfra/common/test_release_upgrades.py --- a/molecule/testinfra/staging/common/test_release_upgrades.py +++ b/molecule/testinfra/common/test_release_upgrades.py @@ -1,3 +1,10 @@ +import pytest + + +test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] + + def test_release_manager_upgrade_channel(host): """ Ensures that the `do-release-upgrade` command will not diff --git a/molecule/testinfra/staging/common/test_system_hardening.py b/molecule/testinfra/common/test_system_hardening.py similarity index 97% rename from molecule/testinfra/staging/common/test_system_hardening.py rename to molecule/testinfra/common/test_system_hardening.py --- a/molecule/testinfra/staging/common/test_system_hardening.py +++ b/molecule/testinfra/common/test_system_hardening.py @@ -1,7 +1,8 @@ import pytest import re -testinfra_hosts = ["app", "app-staging", "mon", "mon-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts 
= [sdvars.app_hostname, sdvars.monitor_hostname] @pytest.mark.parametrize('sysctl_opt', [ diff --git a/molecule/testinfra/staging/common/test_tor_mirror.py b/molecule/testinfra/common/test_tor_mirror.py similarity index 95% rename from molecule/testinfra/staging/common/test_tor_mirror.py rename to molecule/testinfra/common/test_tor_mirror.py --- a/molecule/testinfra/staging/common/test_tor_mirror.py +++ b/molecule/testinfra/common/test_tor_mirror.py @@ -1,5 +1,8 @@ import pytest +test_vars = pytest.securedrop_test_vars +testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] + @pytest.mark.parametrize('repo_file', [ "/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list", diff --git a/molecule/testinfra/staging/common/test_user_config.py b/molecule/testinfra/common/test_user_config.py similarity index 95% rename from molecule/testinfra/staging/common/test_user_config.py rename to molecule/testinfra/common/test_user_config.py --- a/molecule/testinfra/staging/common/test_user_config.py +++ b/molecule/testinfra/common/test_user_config.py @@ -1,5 +1,10 @@ import re import textwrap +import pytest + + +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.app_hostname, sdvars.monitor_hostname] def test_sudoers_config(host): @@ -79,6 +84,7 @@ def test_tmux_installed(host): assert host.package("tmux").is_installed [email protected]_in_prod def test_sudoers_tmux_env_deprecated(host): """ Previous version of the Ansible config set the tmux config diff --git a/molecule/testinfra/staging/mon/test_mon_network.py b/molecule/testinfra/mon/test_mon_network.py similarity index 96% rename from molecule/testinfra/staging/mon/test_mon_network.py rename to molecule/testinfra/mon/test_mon_network.py --- a/molecule/testinfra/staging/mon/test_mon_network.py +++ b/molecule/testinfra/mon/test_mon_network.py @@ -5,10 +5,11 @@ from jinja2 import Template -testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.monitor_hostname] [email protected]_in_prod def test_mon_iptables_rules(host): # Build a dict of variables to pass to jinja for iptables comparison @@ -45,6 +46,7 @@ def test_mon_iptables_rules(host): assert iptables_expected == iptables [email protected]_in_prod @pytest.mark.parametrize('ossec_service', [ dict(host="0.0.0.0", proto="tcp", port=22, listening=True), dict(host="0.0.0.0", proto="udp", port=1514, listening=True), diff --git a/molecule/testinfra/staging/mon/test_ossec_ruleset.py b/molecule/testinfra/mon/test_ossec_ruleset.py similarity index 96% rename from molecule/testinfra/staging/mon/test_ossec_ruleset.py rename to molecule/testinfra/mon/test_ossec_ruleset.py --- a/molecule/testinfra/staging/mon/test_ossec_ruleset.py +++ b/molecule/testinfra/mon/test_ossec_ruleset.py @@ -1,10 +1,10 @@ import pytest import re -testinfra_hosts = ["mon-staging"] +sdvars = pytest.securedrop_test_vars +testinfra_hosts = [sdvars.monitor_hostname] alert_level_regex = re.compile(r"Level: '(\d+)'") rule_id_regex = re.compile(r"Rule id: '(\d+)'") -sdvars = pytest.securedrop_test_vars @pytest.mark.parametrize('log_event', diff --git a/molecule/testinfra/staging/mon/test_ossec_server.py b/molecule/testinfra/mon/test_ossec_server.py similarity index 98% rename from molecule/testinfra/staging/mon/test_ossec_server.py rename to molecule/testinfra/mon/test_ossec_server.py --- a/molecule/testinfra/staging/mon/test_ossec_server.py +++ b/molecule/testinfra/mon/test_ossec_server.py @@ -2,8 +2,8 @@ import pytest -testinfra_hosts = 
["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.monitor_hostname] def test_ossec_connectivity(host): diff --git a/molecule/testinfra/staging/mon/test_postfix.py b/molecule/testinfra/mon/test_postfix.py similarity index 97% rename from molecule/testinfra/staging/mon/test_postfix.py rename to molecule/testinfra/mon/test_postfix.py --- a/molecule/testinfra/staging/mon/test_postfix.py +++ b/molecule/testinfra/mon/test_postfix.py @@ -2,8 +2,8 @@ import pytest -testinfra_hosts = ["mon-staging"] securedrop_test_vars = pytest.securedrop_test_vars +testinfra_hosts = [securedrop_test_vars.monitor_hostname] @pytest.mark.parametrize('header', [ diff --git a/molecule/testinfra/staging/ossec/test_journalist_mail.py b/molecule/testinfra/ossec/test_journalist_mail.py similarity index 100% rename from molecule/testinfra/staging/ossec/test_journalist_mail.py rename to molecule/testinfra/ossec/test_journalist_mail.py
Investigate running test_infra tests against production HW

## Description

Currently, QA testing involves a "basic server testing" phase that duplicates some checks already present in the test_infra tests while missing many others. It would be useful to automate this testing phase, and to make it more thorough, by using the test_infra suite. Failing that, a script that could be run from the Admin Workstation to perform basic server testing would still be useful.

## User Stories

As a SecureDrop Tester, I want to be able to automatically run basic server tests against production servers.
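Several tests in the patch above are tagged `@pytest.mark.skip_in_prod` so that checks which are unsafe or meaningless on production hardware can be skipped. Below is a minimal sketch of how that marker could be honored from a conftest; the environment variable name and the "-staging" heuristic are assumptions, not the project's actual implementation.

```python
# conftest.py fragment -- hypothetical handling of the skip_in_prod marker.
import os

import pytest


def pytest_collection_modifyitems(config, items):
    # Assumption: staging targets carry a "-staging" suffix in their hostname.
    target = os.environ.get("SECUREDROP_TESTINFRA_TARGET_HOST", "app-staging")
    if target.endswith("-staging"):
        return  # staging runs everything, including prod-skipped tests
    skip_prod = pytest.mark.skip(reason="not applicable to production hosts")
    for item in items:
        if "skip_in_prod" in item.keywords:
            item.add_marker(skip_prod)
```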
Tentatively adding to 1.4.0 milestone per today's sprint retrospective; please don't hesitate to nominate other issues on the QA automation front that should be higher priority or also added.
2020-06-16T23:11:53Z
[]
[]
freedomofpress/securedrop
5,325
freedomofpress__securedrop-5325
[ "5285" ]
f6bbe6c196fa4f2e9fb3890b087618fd46d3ffb4
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -24,6 +24,7 @@ """ import argparse +import ipaddress import logging import os import io @@ -33,6 +34,7 @@ import json import base64 import prompt_toolkit +from prompt_toolkit.document import Document from prompt_toolkit.validation import Validator, ValidationError import yaml from pkg_resources import parse_version @@ -48,6 +50,9 @@ EXIT_SUBPROCESS_ERROR = 1 EXIT_INTERRUPT = 2 +MAX_NAMESERVERS = 3 +LIST_SPLIT_RE = re.compile(r"\s*,\s*|\s+") + class FingerprintException(Exception): pass @@ -83,11 +88,34 @@ def validate(self, document): class ValidateIP(Validator): def validate(self, document): - if re.match(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$', # lgtm [py/regex/unmatchable-dollar] # noqa: E501 - document.text): + try: + ipaddress.ip_address(document.text) return True - raise ValidationError( - message="An IP address must be something like 10.240.20.83") + except ValueError as e: + raise ValidationError(message=str(e)) + + class ValidateNameservers(Validator): + def validate(self, document): + candidates = LIST_SPLIT_RE.split(document.text) + if len(candidates) > MAX_NAMESERVERS: + raise ValidationError(message="Specify no more than three nameservers.") + try: + all(map(ipaddress.ip_address, candidates)) + except ValueError: + raise ValidationError( + message=( + "DNS server(s) should be a space/comma-separated list " + "of up to {} IP addresses" + ).format(MAX_NAMESERVERS) + ) + return True + + @staticmethod + def split_list(text): + """ + Splits a string containing a list of values separated by commas or whitespace. + """ + return LIST_SPLIT_RE.split(text) class ValidatePath(Validator): def __init__(self, basedir): @@ -273,10 +301,10 @@ def __init__(self, args): SiteConfig.ValidateNotEmpty(), None, lambda config: True], - ['dns_server', '8.8.8.8', str, - 'DNS server specified during installation', - SiteConfig.ValidateNotEmpty(), - None, + ['dns_server', ['8.8.8.8', '8.8.4.4'], list, + 'DNS server(s)', + SiteConfig.ValidateNameservers(), + SiteConfig.split_list, lambda config: True], ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str, 'Local filepath to public key for ' + @@ -393,14 +421,15 @@ def __init__(self, args): lambda config: True], ] - def load_and_update_config(self): + def load_and_update_config(self, validate: bool = True, prompt: bool = True): if self.exists(): - self.config = self.load() + self.config = self.load(validate) - return self.update_config() + return self.update_config(prompt) - def update_config(self): - self.config.update(self.user_prompt_config()) + def update_config(self, prompt: bool = True): + if prompt: + self.config.update(self.user_prompt_config()) self.save() self.validate_gpg_keys() self.validate_journalist_alert_email() @@ -559,10 +588,56 @@ def save(self): site_config_file, default_flow_style=False) - def load(self): + def clean_config(self, config: dict) -> dict: + """ + Cleans a loaded config without prompting. + + For every variable defined in self.desc, validate its value in + the supplied configuration dictionary, run the value through + its defined transformer, and add the result to a clean version + of the configuration. + + If no configuration variable triggers a ValidationError, the + clean configuration will be returned. 
+ """ + clean_config = {} + clean_config.update(config) + for desc in self.desc: + var, default, vartype, prompt, validator, transform, condition = desc + if var in clean_config: + value = clean_config[var] + if isinstance(value, list): + text = " ".join(str(v) for v in value) + elif isinstance(value, bool): + text = "yes" if value else "no" + else: + text = str(value) + + if validator is not None: + try: + validator.validate(Document(text)) + except ValidationError as e: + sdlog.error(e) + sdlog.error( + 'Error loading configuration. ' + 'Please run "securedrop-admin sdconfig" again.' + ) + raise + clean_config[var] = transform(text) if transform else text + return clean_config + + def load(self, validate=True): + """ + Loads the site configuration file. + + If validate is True, then each configuration variable that has + an entry in self.desc is validated and transformed according + to current specifications. + """ try: with io.open(self.args.site_config) as site_config_file: - return yaml.safe_load(site_config_file) + c = yaml.safe_load(site_config_file) + return self.clean_config(c) if validate else c except IOError: sdlog.error("Config file missing, re-run with sdconfig") raise @@ -586,7 +661,7 @@ def setup_logger(verbose=False): def sdconfig(args): """Configure SD site settings""" - SiteConfig(args).load_and_update_config() + SiteConfig(args).load_and_update_config(validate=False) return 0 @@ -659,7 +734,7 @@ def find_or_generate_new_torv3_keys(args): def install_securedrop(args): """Install/Update SecureDrop""" - SiteConfig(args).load() + SiteConfig(args).load_and_update_config(prompt=False) sdlog.info("Now installing SecureDrop on remote servers.") sdlog.info("You will be prompted for the sudo password on the " diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -49,7 +49,7 @@ # General information about the project. project = u'SecureDrop' -copyright = u'2015-2019, Freedom of the Press Foundation' +copyright = u'2015-2020, Freedom of the Press Foundation' author = u'SecureDrop Team and Contributors' # The version info for the project you're documenting, acts as replacement for @@ -57,9 +57,9 @@ # built documents. # # The short X.Y version. -version = '1.3.0' +version = '1.4.0' # The full version, including alpha/beta/rc tags. -release = '1.3.0' +release = '1.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/install_files/securedrop-ossec-agent/var/ossec/checksdconfig.py b/install_files/securedrop-ossec-agent/var/ossec/checksdconfig.py new file mode 100755 --- /dev/null +++ b/install_files/securedrop-ossec-agent/var/ossec/checksdconfig.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import argparse +import subprocess +import sys + + +IPTABLES_RULES_UNCONFIGURED = { + "all": ["-P INPUT ACCEPT", "-P FORWARD ACCEPT", "-P OUTPUT ACCEPT"] +} + + +IPTABLES_RULES_DEFAULT_DROP = { + "policies": [ + "-P INPUT DROP", + "-P FORWARD DROP", + "-P OUTPUT DROP", + ], + "input": [ + '-A INPUT -m comment --comment "Drop and log all other incoming traffic" -j LOGNDROP', + ], + "output": [ + '-A OUTPUT -m comment --comment "Drop all other outgoing traffic" -j DROP', + ], + "logndrop": [ + ( + "-A LOGNDROP -p tcp -m limit --limit 5/min -j LOG --log-tcp-options --log-ip-options " + "--log-uid" + ), + "-A LOGNDROP -p udp -m limit --limit 5/min -j LOG --log-ip-options --log-uid", + "-A LOGNDROP -p icmp -m limit --limit 5/min -j LOG --log-ip-options --log-uid", + "-A LOGNDROP -j DROP", + ] +} + + +def list_iptables_rules(): + result = subprocess.run( + ["iptables", "-S"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + rules = result.stdout.decode("utf-8").splitlines() + policies = [r for r in rules if r.startswith("-P")] + input_rules = [r for r in rules if r.startswith("-A INPUT")] + output_rules = [r for r in rules if r.startswith("-A OUTPUT")] + logndrop_rules = [r for r in rules if r.startswith("-A LOGNDROP")] + return { + "all": rules, + "policies": policies, + "input": input_rules, + "output": output_rules, + "logndrop": logndrop_rules, + } + + +def check_iptables_are_default(rules): + if rules["all"] == IPTABLES_RULES_UNCONFIGURED: + raise ValueError("The iptables rules have not been configured.") + + +def check_iptables_default_drop(rules): + for chain, chain_rules in IPTABLES_RULES_DEFAULT_DROP.items(): + for i, rule in enumerate(reversed(chain_rules), 1): + try: + if rules[chain][-i] != rule: + raise ValueError("The iptables default drop rules are incorrect.") + except (KeyError, IndexError): + raise ValueError("The iptables default drop rules are incorrect.") + + +def check_iptables_rules(): + rules = list_iptables_rules() + check_iptables_are_default(rules) + check_iptables_default_drop(rules) + + +def check_system_configuration(args): + print("Checking system configuration...") + try: + check_iptables_rules() + except ValueError as e: + print("System configuration error:", e) + sys.exit(1) + print("System configuration checks were successful.") + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SecureDrop server configuration check') + args = parser.parse_args() + check_system_configuration(args) diff --git a/install_files/securedrop-ossec-server/var/ossec/checksdconfig.py b/install_files/securedrop-ossec-server/var/ossec/checksdconfig.py new file mode 100755 --- /dev/null +++ b/install_files/securedrop-ossec-server/var/ossec/checksdconfig.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import argparse +import subprocess +import sys + + +IPTABLES_RULES_UNCONFIGURED = { + "all": ["-P INPUT ACCEPT", "-P FORWARD ACCEPT", "-P OUTPUT ACCEPT"] +} + + +IPTABLES_RULES_DEFAULT_DROP = { + "policies": [ + "-P INPUT DROP", + "-P FORWARD DROP", + "-P OUTPUT DROP", + ], + "input": [ + '-A INPUT -m comment --comment "Drop and log all other incoming traffic" -j LOGNDROP', + ], + "output": [ + '-A OUTPUT -m comment --comment 
"Drop all other outgoing traffic" -j DROP', + ], + "logndrop": [ + ( + "-A LOGNDROP -p tcp -m limit --limit 5/min -j LOG --log-tcp-options --log-ip-options " + "--log-uid" + ), + "-A LOGNDROP -p udp -m limit --limit 5/min -j LOG --log-ip-options --log-uid", + "-A LOGNDROP -p icmp -m limit --limit 5/min -j LOG --log-ip-options --log-uid", + "-A LOGNDROP -j DROP", + ] +} + + +def list_iptables_rules(): + result = subprocess.run( + ["iptables", "-S"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + rules = result.stdout.decode("utf-8").splitlines() + policies = [r for r in rules if r.startswith("-P")] + input_rules = [r for r in rules if r.startswith("-A INPUT")] + output_rules = [r for r in rules if r.startswith("-A OUTPUT")] + logndrop_rules = [r for r in rules if r.startswith("-A LOGNDROP")] + return { + "all": rules, + "policies": policies, + "input": input_rules, + "output": output_rules, + "logndrop": logndrop_rules, + } + + +def check_iptables_are_default(rules): + if rules["all"] == IPTABLES_RULES_UNCONFIGURED: + raise ValueError("The iptables rules have not been configured.") + + +def check_iptables_default_drop(rules): + for chain, chain_rules in IPTABLES_RULES_DEFAULT_DROP.items(): + for i, rule in enumerate(reversed(chain_rules), 1): + try: + if rules[chain][-i] != rule: + raise ValueError("The iptables default drop rules are incorrect.") + except (KeyError, IndexError): + raise ValueError("The iptables default drop rules are incorrect.") + + +def check_iptables_rules(): + rules = list_iptables_rules() + check_iptables_are_default(rules) + check_iptables_default_drop(rules) + + +def check_system_configuration(args): + print("Checking system configuration...") + try: + check_iptables_rules() + except ValueError as e: + print("System configuration error:", e) + sys.exit(1) + print("System configuration checks were successful.") + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SecureDrop server configuration check') + args = parser.parse_args() + check_system_configuration(args) diff --git a/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py b/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py new file mode 100644 --- /dev/null +++ b/securedrop/alembic/versions/35513370ba0d_add_source_deleted_at.py @@ -0,0 +1,32 @@ +"""add Source.deleted_at + +Revision ID: 35513370ba0d +Revises: 523fff3f969c +Create Date: 2020-05-06 22:28:01.214359 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '35513370ba0d' +down_revision = '523fff3f969c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('sources', schema=None) as batch_op: + batch_op.add_column(sa.Column('deleted_at', sa.DateTime(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('sources', schema=None) as batch_op: + batch_op.drop_column('deleted_at') + + # ### end Alembic commands ### diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py --- a/securedrop/crypto_util.py +++ b/securedrop/crypto_util.py @@ -4,6 +4,7 @@ import pretty_bad_protocol as gnupg import os import io +import re import scrypt from random import SystemRandom @@ -76,6 +77,8 @@ class CryptoUtil: REDIS_FINGERPRINT_HASH = "sd/crypto-util/fingerprints" REDIS_KEY_HASH = "sd/crypto-util/keys" + SOURCE_KEY_UID_RE = re.compile(r"(Source|Autogenerated) Key <[-A-Za-z0-9+/=_]+>") + def __init__(self, scrypt_params, scrypt_id_pepper, @@ -213,23 +216,52 @@ def genkeypair(self, name, secret): key_length=self.__gpg_key_length, passphrase=secret, name_email=name, + name_real="Source Key", creation_date=self.DEFAULT_KEY_CREATION_DATE.isoformat(), expire_date=self.DEFAULT_KEY_EXPIRATION_DATE )) return genkey_obj + def find_source_key(self, fingerprint: str) -> typing.Optional[typing.Dict]: + """ + Searches the GPG keyring for a source key. + + A source key has the given fingerprint and is labeled either + "Source Key" or "Autogenerated Key". + + Returns the key or None. + """ + keys = self.gpg.list_keys() + for key in keys: + if fingerprint != key["fingerprint"]: + continue + + for uid in key["uids"]: + if self.SOURCE_KEY_UID_RE.match(uid): + return key + else: + return None + return None + def delete_reply_keypair(self, source_filesystem_id): - key = self.get_fingerprint(source_filesystem_id) + fingerprint = self.get_fingerprint(source_filesystem_id) + # If this source was never flagged for review, they won't have a reply # keypair - if not key: + if not fingerprint: return + # verify that the key with the given fingerprint belongs to a source + key = self.find_source_key(fingerprint) + if not key: + raise ValueError("source key not found") + # Always delete keys without invoking pinentry-mode = loopback # see: https://lists.gnupg.org/pipermail/gnupg-users/2016-May/055965.html temp_gpg = gnupg.GPG(binary='gpg2', homedir=self.gpg_key_dir) + # The subkeys keyword argument deletes both secret and public keys. - temp_gpg.delete_keys(key, secret=True, subkeys=True) + temp_gpg.delete_keys(fingerprint, secret=True, subkeys=True) self.redis.hdel(self.REDIS_KEY_HASH, self.get_fingerprint(source_filesystem_id)) self.redis.hdel(self.REDIS_FINGERPRINT_HASH, source_filesystem_id) diff --git a/securedrop/journalist.py b/securedrop/journalist.py --- a/securedrop/journalist.py +++ b/securedrop/journalist.py @@ -15,7 +15,7 @@ def prime_keycache(): Preloads CryptoUtil.keycache. 
""" with app.app_context(): - for source in Source.query.filter_by(pending=False).all(): + for source in Source.query.filter_by(pending=False, deleted_at=None).all(): app.crypto_util.get_pubkey(source.filesystem_id) diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -135,7 +135,7 @@ def get_token(): @api.route('/sources', methods=['GET']) @token_required def get_all_sources(): - sources = Source.query.filter_by(pending=False).all() + sources = Source.query.filter_by(pending=False, deleted_at=None).all() return jsonify( {'sources': [source.to_json() for source in sources]}), 200 diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -41,7 +41,12 @@ def col(filesystem_id): def delete_single(filesystem_id): """deleting a single collection from its /col page""" source = get_source(filesystem_id) - delete_collection(filesystem_id) + try: + delete_collection(filesystem_id) + except ValueError as e: + current_app.logger.error("error deleting collection: %s", e) + abort(500) + flash(gettext("{source_name}'s collection deleted") .format(source_name=source.journalist_designation), "notification") diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py --- a/securedrop/journalist_app/forms.py +++ b/securedrop/journalist_app/forms.py @@ -3,7 +3,7 @@ from flask_babel import lazy_gettext as gettext from flask_wtf import FlaskForm from flask_wtf.file import FileField, FileAllowed, FileRequired -from wtforms import (TextAreaField, TextField, BooleanField, HiddenField, +from wtforms import (TextAreaField, StringField, BooleanField, HiddenField, ValidationError) from wtforms.validators import InputRequired, Optional @@ -38,16 +38,16 @@ def name_length_validation(form, field): class NewUserForm(FlaskForm): - username = TextField('username', validators=[ + username = StringField('username', validators=[ InputRequired(message=gettext('This field is required.')), minimum_length_validation ]) - first_name = TextField('first_name', validators=[name_length_validation, Optional()]) - last_name = TextField('last_name', validators=[name_length_validation, Optional()]) + first_name = StringField('first_name', validators=[name_length_validation, Optional()]) + last_name = StringField('last_name', validators=[name_length_validation, Optional()]) password = HiddenField('password') is_admin = BooleanField('is_admin') is_hotp = BooleanField('is_hotp') - otp_secret = TextField('otp_secret', validators=[ + otp_secret = StringField('otp_secret', validators=[ otp_secret_validation, Optional() ]) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -64,7 +64,7 @@ def index(): # Long SQLAlchemy statements look best when formatted according to # the Pocoo style guide, IMHO: # http://www.pocoo.org/internal/styleguide/ - sources = Source.query.filter_by(pending=False) \ + sources = Source.query.filter_by(pending=False, deleted_at=None) \ .filter(Source.last_updated.isnot(None)) \ .order_by(Source.last_updated.desc()) \ .all() @@ -171,7 +171,7 @@ def bulk(): @view.route('/download_unread/<filesystem_id>') def download_unread_filesystem_id(filesystem_id): id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id 
submissions = Submission.query.filter( Submission.source_id == id, Submission.downloaded == false()).all() diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- import binascii +import datetime +import os -from datetime import datetime from flask import (g, flash, current_app, abort, send_file, redirect, url_for, render_template, Markup, sessions, request) from flask_babel import gettext, ngettext @@ -12,7 +13,8 @@ from db import db from models import (get_one_or_else, Source, Journalist, InvalidUsernameException, WrongPasswordException, FirstOrLastNameError, LoginThrottledException, - BadTokenException, SourceStar, PasswordError, Submission, RevokedToken) + BadTokenException, SourceStar, PasswordError, Submission, RevokedToken, + InvalidPasswordLength) from store import add_checksum_for_file import typing @@ -53,11 +55,17 @@ def commit_account_changes(user): flash(gettext("Account updated."), "success") -def get_source(filesystem_id): - """Return a Source object, representing the database row, for the source - with the `filesystem_id`""" +def get_source(filesystem_id, include_deleted=False): + """ + Return the Source object with `filesystem_id` + + If `include_deleted` is False, only sources with a null `deleted_at` will + be returned. + """ source = None query = Source.query.filter(Source.filesystem_id == filesystem_id) + if not include_deleted: + query = query.filter_by(deleted_at=None) source = get_one_or_else(query, current_app.logger, abort) return source @@ -77,7 +85,8 @@ def validate_user(username, password, token, error_message=None): except (InvalidUsernameException, BadTokenException, WrongPasswordException, - LoginThrottledException) as e: + LoginThrottledException, + InvalidPasswordLength) as e: current_app.logger.error("Login for '{}' failed: {}".format( username, e)) if not error_message: @@ -157,7 +166,7 @@ def download(zip_basename, submissions): zf = current_app.storage.get_bulk_archive(submissions, zip_directory=zip_basename) attachment_filename = "{}--{}.zip".format( - zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) + zip_basename, datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S")) # Mark the submissions that have been downloaded as such for submission in submissions: @@ -233,8 +242,11 @@ def col_delete(cols_selected): if len(cols_selected) < 1: flash(gettext("No collections selected for deletion."), "error") else: - for filesystem_id in cols_selected: - delete_collection(filesystem_id) + now = datetime.datetime.utcnow() + sources = Source.query.filter(Source.filesystem_id.in_(cols_selected)) + sources.update({Source.deleted_at: now}, synchronize_session="fetch") + db.session.commit() + num = len(cols_selected) flash(ngettext('{num} collection deleted', '{num} collections deleted', num).format(num=num), @@ -259,17 +271,36 @@ def make_password(config): def delete_collection(filesystem_id): # Delete the source's collection of submissions path = current_app.storage.path(filesystem_id) - current_app.storage.move_to_shredder(path) + if os.path.exists(path): + current_app.storage.move_to_shredder(path) # Delete the source's reply keypair - current_app.crypto_util.delete_reply_keypair(filesystem_id) + try: + current_app.crypto_util.delete_reply_keypair(filesystem_id) + except ValueError as e: + current_app.logger.error("could not delete reply keypair: %s", e) + raise # Delete their entry in the db - 
source = get_source(filesystem_id) + source = get_source(filesystem_id, include_deleted=True) db.session.delete(source) db.session.commit() +def purge_deleted_sources(): + """ + Deletes all Sources with a non-null `deleted_at` attribute. + """ + sources = Source.query.filter(Source.deleted_at.isnot(None)).order_by(Source.deleted_at).all() + if sources: + current_app.logger.info("Purging deleted sources (%s)", len(sources)) + for source in sources: + try: + delete_collection(source.filesystem_id) + except Exception as e: + current_app.logger.error("Error deleting source %s: %s", source.uuid, e) + + def set_name(user, first_name, last_name): try: user.set_name(first_name, last_name) @@ -312,7 +343,7 @@ def col_download_unread(cols_selected): submissions = [] for filesystem_id in cols_selected: id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id submissions += Submission.query.filter( Submission.downloaded == false(), Submission.source_id == id).all() @@ -328,7 +359,7 @@ def col_download_all(cols_selected): submissions = [] for filesystem_id in cols_selected: id = Source.query.filter(Source.filesystem_id == filesystem_id) \ - .one().id + .filter_by(deleted_at=None).one().id submissions += Submission.query.filter( Submission.source_id == id).all() return download("all", submissions) diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -73,6 +73,9 @@ class Source(db.Model): # keep track of how many interactions have happened, for filenames interaction_count = Column(Integer, default=0, nullable=False) + # when deletion of the source was requested + deleted_at = Column(DateTime) + # Don't create or bother checking excessively long codenames to prevent DoS NUM_WORDS = 7 MAX_CODENAME_LEN = 128 diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py --- a/securedrop/qa_loader.py +++ b/securedrop/qa_loader.py @@ -19,9 +19,6 @@ from sdconfig import config as sdconfig -random.seed("~(=^–^)") # mrow? - - def random_bool(): return bool(random.getrandbits(1)) diff --git a/securedrop/rm.py b/securedrop/rm.py --- a/securedrop/rm.py +++ b/securedrop/rm.py @@ -22,8 +22,7 @@ import subprocess -def shred(path, delete=True): - # type: (str, bool) -> None +def shred(path: str, delete: bool = True) -> None: """ Run shred on the file at the given path. @@ -51,8 +50,7 @@ def shred(path, delete=True): subprocess.check_call(cmd) -def secure_delete(path): - # type: (str) -> None +def secure_delete(path: str) -> None: """ Securely deletes the file at ``path``. @@ -87,8 +85,7 @@ def secure_delete(path): os.rmdir(d) -def check_secure_delete_capability(): - # type: () -> bool +def check_secure_delete_capability() -> bool: """ Checks the availability of the program we use for secure deletion. 
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py --- a/securedrop/source_app/__init__.py +++ b/securedrop/source_app/__init__.py @@ -165,6 +165,7 @@ def setup_g(): try: g.source = Source.query \ .filter(Source.filesystem_id == g.filesystem_id) \ + .filter_by(deleted_at=None) \ .one() except NoResultFound as e: app.logger.error( diff --git a/securedrop/version.py b/securedrop/version.py --- a/securedrop/version.py +++ b/securedrop/version.py @@ -1 +1 @@ -__version__ = '1.3.0' +__version__ = '1.4.0' diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setuptools.setup( name="securedrop-app-code", - version="1.3.0", + version="1.4.0", author="Freedom of the Press Foundation", author_email="[email protected]", description="SecureDrop Server",
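The patch above replaces the single-string `dns_server` setting with a list parsed by `LIST_SPLIT_RE` / `SiteConfig.split_list`. A self-contained illustration of the parsing behavior, reusing the exact regex from the patch:

```python
import re

# Same pattern as in the patch: entries separated by commas and/or whitespace.
LIST_SPLIT_RE = re.compile(r"\s*,\s*|\s+")

print(LIST_SPLIT_RE.split("8.8.8.8, 8.8.4.4"))         # ['8.8.8.8', '8.8.4.4']
print(LIST_SPLIT_RE.split("8.8.8.8 8.8.4.4"))          # ['8.8.8.8', '8.8.4.4']
print(LIST_SPLIT_RE.split("1.1.1.1,8.8.8.8 9.9.9.9"))  # ['1.1.1.1', '8.8.8.8', '9.9.9.9']
```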
diff --git a/admin/tests/files/site-specific b/admin/tests/files/site-specific --- a/admin/tests/files/site-specific +++ b/admin/tests/files/site-specific @@ -1,6 +1,7 @@ app_hostname: app app_ip: 10.20.2.2 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 monitor_hostname: mon monitor_ip: 10.20.3.2 ossec_alert_email: [email protected] @@ -13,7 +14,7 @@ securedrop_app_gpg_fingerprint: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2 securedrop_app_gpg_public_key: key.asc securedrop_app_https_on_source_interface: false securedrop_supported_locales: -- en +- en_US smtp_relay: smtp.gmail.com smtp_relay_port: 587 ssh_users: sd diff --git a/admin/tests/files/site-specific-missing-entries b/admin/tests/files/site-specific-missing-entries --- a/admin/tests/files/site-specific-missing-entries +++ b/admin/tests/files/site-specific-missing-entries @@ -1,6 +1,7 @@ app_hostname: app app_ip: 10.20.2.2 -dns_server: 8.8.8.8 +dns_server: + - 8.8.8.8 monitor_hostname: mon monitor_ip: 10.20.3.2 ossec_alert_email: [email protected] diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -19,7 +19,9 @@ OUTPUT1 = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 +- 8.8.4.4 enable_ssh_over_tor: true journalist_alert_email: '' journalist_alert_gpg_public_key: '' @@ -51,7 +53,9 @@ WHEN_BOTH_TRUE = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 +- 8.8.4.4 enable_ssh_over_tor: true journalist_alert_email: '' journalist_alert_gpg_public_key: '' @@ -83,7 +87,9 @@ WHEN_ONLY_V2 = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 +- 8.8.4.4 enable_ssh_over_tor: true journalist_alert_email: '' journalist_alert_gpg_public_key: '' @@ -115,7 +121,9 @@ JOURNALIST_ALERT_OUTPUT = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 +- 8.8.4.4 enable_ssh_over_tor: true journalist_alert_email: [email protected] journalist_alert_gpg_public_key: sd_admin_test.pub @@ -147,7 +155,9 @@ HTTPS_OUTPUT = '''app_hostname: app app_ip: 10.20.2.2 daily_reboot_time: 5 -dns_server: 8.8.8.8 +dns_server: +- 8.8.8.8 +- 8.8.4.4 enable_ssh_over_tor: true journalist_alert_email: [email protected] journalist_alert_gpg_public_key: sd_admin_test.pub @@ -229,8 +239,8 @@ def verify_hostname_mon_prompt(child): def verify_dns_prompt(child): - child.expect(rb'DNS server specified during installation\:', timeout=2) - assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '8.8.8.8' # noqa: E501 + child.expect(rb'DNS server\(s\):', timeout=2) + assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == '8.8.8.8 8.8.4.4' # noqa: E501 def verify_app_gpg_key_prompt(child): @@ -674,6 +684,7 @@ def test_sdconfig_enable_https_on_source_interface(): @pytest.fixture def securedrop_git_repo(tmpdir): + cwd = os.getcwd() os.chdir(str(tmpdir)) # Clone the SecureDrop repository into the temp directory. 
cmd = ['git', 'clone', @@ -698,6 +709,8 @@ def securedrop_git_repo(tmpdir): # It means the coverage file may not exist, don't error pass + os.chdir(cwd) + def set_reliable_keyserver(gpgdir): # If gpg.conf doesn't exist, create it and set a reliable default diff --git a/molecule/builder-xenial/tests/vars.yml b/molecule/builder-xenial/tests/vars.yml --- a/molecule/builder-xenial/tests/vars.yml +++ b/molecule/builder-xenial/tests/vars.yml @@ -1,7 +1,7 @@ --- -securedrop_version: "1.3.0" +securedrop_version: "1.4.0" ossec_version: "3.6.0" -keyring_version: "0.1.3" +keyring_version: "0.1.4" config_version: "0.1.3" grsec_version: "4.14.175" diff --git a/molecule/fetch-tor-packages/tests/test_tor_packages.py b/molecule/fetch-tor-packages/tests/test_tor_packages.py --- a/molecule/fetch-tor-packages/tests/test_tor_packages.py +++ b/molecule/fetch-tor-packages/tests/test_tor_packages.py @@ -8,7 +8,7 @@ {"name": "tor", "arch": "amd64"}, {"name": "tor-geoipdb", "arch": "all"}, ] -TOR_VERSION = "0.4.2.7-1~xenial+1" +TOR_VERSION = "0.4.3.5-1~xenial+1" def test_tor_apt_repo(host): diff --git a/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py b/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py new file mode 100644 --- /dev/null +++ b/molecule/testinfra/staging/app-code/test_securedrop_source_deleter_configuration.py @@ -0,0 +1,48 @@ +import pytest + + +testinfra_hosts = ["app-staging"] + + +def test_securedrop_source_deleter_service(host): + """ + Verify configuration of securedrop_source_deleter systemd service. + """ + securedrop_test_vars = pytest.securedrop_test_vars + service_file = "/lib/systemd/system/securedrop_source_deleter.service" + expected_content = "\n".join([ + "[Unit]", + "Description=SecureDrop Source deleter", + "", + "[Service]", + 'Environment=PYTHONPATH="{}:{}"'.format( + securedrop_test_vars.securedrop_code, securedrop_test_vars.securedrop_venv_site_packages + ), + "ExecStart={}/python /var/www/securedrop/scripts/source_deleter --interval 10".format( + securedrop_test_vars.securedrop_venv_bin + ), + "PrivateDevices=yes", + "PrivateTmp=yes", + "ProtectSystem=full", + "ReadOnlyDirectories=/", + "ReadWriteDirectories={}".format(securedrop_test_vars.securedrop_data), + "Restart=always", + "RestartSec=10s", + "UMask=077", + "User={}".format(securedrop_test_vars.securedrop_user), + "WorkingDirectory={}".format(securedrop_test_vars.securedrop_code), + "", + "[Install]", + "WantedBy=multi-user.target\n", + ]) + + f = host.file(service_file) + assert f.is_file + assert f.mode == 0o644 + assert f.user == "root" + assert f.group == "root" + assert f.content_string == expected_content + + s = host.service("securedrop_source_deleter") + assert s.is_enabled + assert s.is_running diff --git a/molecule/testinfra/staging/common/test_fpf_apt_repo.py b/molecule/testinfra/staging/common/test_fpf_apt_repo.py --- a/molecule/testinfra/staging/common/test_fpf_apt_repo.py +++ b/molecule/testinfra/staging/common/test_fpf_apt_repo.py @@ -44,7 +44,7 @@ def test_fpf_apt_repo_fingerprint(host): fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg --------------------------------------------- -pub 4096R/00F4AD77 2016-10-20 [expires: 2020-06-30] +pub 4096R/00F4AD77 2016-10-20 [expires: 2021-06-30] Key fingerprint = 2224 5C81 E3BA EB41 38B3 6061 310F 5612 00F4 AD77 uid SecureDrop Release Signing Key""" diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py --- a/securedrop/tests/conftest.py +++ 
b/securedrop/tests/conftest.py @@ -119,7 +119,7 @@ def config(tmpdir): def alembic_config(config): base_dir = path.join(path.dirname(__file__), '..') migrations_dir = path.join(base_dir, 'alembic') - ini = configparser.SafeConfigParser() + ini = configparser.ConfigParser() ini.read(path.join(base_dir, 'alembic.ini')) ini.set('alembic', 'script_location', path.join(migrations_dir)) diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py --- a/securedrop/tests/functional/journalist_navigation_steps.py +++ b/securedrop/tests/functional/journalist_navigation_steps.py @@ -720,15 +720,20 @@ def _visit_edit_secret(self, otp_type, tooltip_text=''): # Also, confirm the text on the tooltip is the correct one. reset_button.location_once_scrolled_into_view ActionChains(self.driver).move_to_element(reset_button).perform() - time.sleep(1) - explanatory_tooltip_opacity = self.driver.find_elements_by_css_selector( - "#button-reset-two-factor-" + otp_type + " span")[0].value_of_css_property("opacity") - explanatory_tooltip_content = self.driver.find_elements_by_css_selector( - "#button-reset-two-factor-" + otp_type + " span")[0].text - assert explanatory_tooltip_opacity == "1" - if not hasattr(self, "accept_languages"): - assert explanatory_tooltip_content == tooltip_text + def explanatory_tooltip_is_correct(): + explanatory_tooltip = self.driver.find_element_by_css_selector( + "#button-reset-two-factor-" + otp_type + " span" + ) + + explanatory_tooltip_opacity = explanatory_tooltip.value_of_css_property("opacity") + assert explanatory_tooltip_opacity == "1" + + if not hasattr(self, "accept_languages"): + assert explanatory_tooltip.text == tooltip_text + + self.wait_for(explanatory_tooltip_is_correct) + reset_form.submit() alert = self.driver.switch_to_alert() diff --git a/securedrop/tests/migrations/migration_35513370ba0d.py b/securedrop/tests/migrations/migration_35513370ba0d.py new file mode 100644 --- /dev/null +++ b/securedrop/tests/migrations/migration_35513370ba0d.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- + +import random +from uuid import uuid4 + +from db import db +from journalist_app import create_app +import sqlalchemy +import pytest + +from .helpers import bool_or_none, random_bool, random_chars, random_datetime + + +class UpgradeTester: + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + with self.app.app_context(): + self.add_source() + self.valid_source_id = 1 + + db.session.commit() + + @staticmethod + def add_source(): + filesystem_id = random_chars(96) if random_bool() else None + params = { + "uuid": str(uuid4()), + "filesystem_id": filesystem_id, + "journalist_designation": random_chars(50), + "flagged": bool_or_none(), + "last_updated": random_datetime(nullable=True), + "pending": bool_or_none(), + "interaction_count": random.randint(0, 1000), + } + sql = """ + INSERT INTO sources ( + uuid, filesystem_id, journalist_designation, flagged, last_updated, + pending, interaction_count + ) VALUES ( + :uuid, :filesystem_id, :journalist_designation, :flagged, :last_updated, + :pending, :interaction_count + ) + """ + + db.engine.execute(sqlalchemy.text(sql), **params) + + def check_upgrade(self): + """ + Check the new `deleted_at` column + + Querying `deleted_at` shouldn't cause an error, and no source + should already have it set. 
+ """ + with self.app.app_context(): + sources = db.engine.execute( + sqlalchemy.text("SELECT * FROM sources WHERE deleted_at IS NOT NULL") + ).fetchall() + assert len(sources) == 0 + + +class DowngradeTester: + def __init__(self, config): + self.config = config + self.app = create_app(config) + + def load_data(self): + pass + + def check_downgrade(self): + """ + After downgrade, using `deleted_at` in a query should raise an exception + """ + with self.app.app_context(): + with pytest.raises(sqlalchemy.exc.OperationalError): + sources = db.engine.execute( + sqlalchemy.text( + "SELECT * FROM sources WHERE deleted_at IS NOT NULL" + ) + ).fetchall() + assert len(sources) == 0 diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py --- a/securedrop/tests/test_crypto_util.py +++ b/securedrop/tests/test_crypto_util.py @@ -295,6 +295,17 @@ def test_delete_reply_keypair_no_key(source_app): source_app.crypto_util.delete_reply_keypair('Reality Winner') +def test_delete_reply_keypair_non_source(source_app): + """ + Checks that a non-source key is not deleted by delete_reply_keypair. + """ + name = "SecureDrop Test/Development (DO NOT USE IN PRODUCTION)" + with pytest.raises(ValueError) as excinfo: + source_app.crypto_util.delete_reply_keypair(name) + assert "source key not found" in str(excinfo.value) + assert source_app.crypto_util.get_fingerprint(name) + + def test_get_fingerprint(source_app, test_source): assert (source_app.crypto_util.get_fingerprint(test_source['filesystem_id']) is not None) diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py --- a/securedrop/tests/test_integration.py +++ b/securedrop/tests/test_integration.py @@ -552,6 +552,9 @@ def test_delete_collections(mocker, journalist_app, source_app, test_journo): assert "{} collections deleted".format(num_sources) in text assert async_genkey.called + # simulate the source_deleter's work + journalist_app_module.utils.purge_deleted_sources() + # Make sure the collections are deleted from the filesystem def assertion(): assert not ( diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -614,6 +614,21 @@ def test_max_password_length(): password=overly_long_password) +def test_login_password_too_long(journalist_app, test_journo, mocker): + mocked_error_logger = mocker.patch('journalist.app.logger.error') + with journalist_app.test_client() as app: + resp = app.post(url_for('main.login'), + data=dict(username=test_journo['username'], + password='a' * (Journalist.MAX_PASSWORD_LEN + 1), + token=TOTP(test_journo['otp_secret']).now())) + assert resp.status_code == 200 + text = resp.data.decode('utf-8') + assert "Login failed" in text + mocked_error_logger.assert_called_once_with( + "Login for '{}' failed: Password too long (len={})".format( + test_journo['username'], Journalist.MAX_PASSWORD_LEN + 1)) + + def test_min_password_length(): """Creating a Journalist with a password that is smaller than the minimum password length should raise an exception. 
This uses the @@ -1330,7 +1345,7 @@ def test_logo_upload_with_valid_image_succeeds(journalist_app, test_admin): test_admin['otp_secret']) # Create 1px * 1px 'white' PNG file from its base64 string form = journalist_app_module.forms.LogoForm( - logo=(BytesIO(base64.decodestring + logo=(BytesIO(base64.decodebytes (b"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQ" b"VR42mP8/x8AAwMCAO+ip1sAAAAASUVORK5CYII=")), 'test.png') ) @@ -2067,6 +2082,8 @@ def test_col_process_successfully_deletes_multiple_sources(journalist_app, utils.db_helper.submit(source_1, 1) source_2, _ = utils.db_helper.init_source() utils.db_helper.submit(source_2, 1) + source_3, _ = utils.db_helper.init_source() + utils.db_helper.submit(source_3, 1) with journalist_app.test_client() as app: _login_user(app, test_journo['username'], test_journo['password'], @@ -2081,9 +2098,13 @@ def test_col_process_successfully_deletes_multiple_sources(journalist_app, assert resp.status_code == 200 - # Verify there are no remaining sources + # simulate the source_deleter's work + journalist_app_module.utils.purge_deleted_sources() + + # Verify that all of the specified sources were deleted, but no others remaining_sources = Source.query.all() - assert not remaining_sources + assert len(remaining_sources) == 1 + assert remaining_sources[0].uuid == source_3.uuid def test_col_process_successfully_stars_sources(journalist_app, diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py --- a/securedrop/tests/test_source.py +++ b/securedrop/tests/test_source.py @@ -14,6 +14,7 @@ import version from db import db +from journalist_app.utils import delete_collection from models import InstanceConfig, Source, Reply from source_app import main as source_app_main from source_app import api as source_app_api @@ -653,11 +654,7 @@ def test_source_is_deleted_while_logged_in(source_app): # Now the journalist deletes the source filesystem_id = source_app.crypto_util.hash_codename(codename) - source_app.crypto_util.delete_reply_keypair(filesystem_id) - source = Source.query.filter_by( - filesystem_id=filesystem_id).one() - db.session.delete(source) - db.session.commit() + delete_collection(filesystem_id) # Source attempts to continue to navigate resp = app.post(url_for('main.lookup'), follow_redirects=True) @@ -668,8 +665,8 @@ def test_source_is_deleted_while_logged_in(source_app): assert 'codename' not in session logger.assert_called_once_with( - "Found no Sources when one was expected: " - "No row was found for one()") + "Found no Sources when one was expected: No row was found for one()" + ) def test_login_with_invalid_codename(source_app):
Upgrade sphinx for develop requirements

## Description

This is caught by the `safety` tool.

```
Checking file ./securedrop/requirements/python3/develop-requirements.txt
safety report
checked 123 packages, using default DB
---
-> sphinx, installed 1.6.3, affected <3.0.4, id 38330
Sphinx 3.0.4 updates jQuery version from 3.4.1 to 3.5.1 for security reasons.
--
Makefile:139: recipe for target 'safety' failed
make: *** [safety] Error 1
make: Leaving directory '/home/circleci/project'
```
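For reference, a hedged sketch of reproducing the `make safety` check by hand. It assumes the `safety` CLI is installed and accepts its standard `check -r` invocation; the glob path mirrors the requirements layout named in the log above.

```python
# Hypothetical wrapper around the safety CLI, mirroring the Makefile target.
import glob
import subprocess
import sys

failed = False
for req_file in sorted(glob.glob("securedrop/requirements/python3/*.txt")):
    print("Checking file", req_file)
    result = subprocess.run(["safety", "check", "--full-report", "-r", req_file])
    failed = failed or result.returncode != 0

sys.exit(1 if failed else 0)
```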
2020-06-18T00:52:07Z
[]
[]
freedomofpress/securedrop
5,335
freedomofpress__securedrop-5335
[ "5334" ]
b8fa78ffbef35c26dd1792765150fd0dfdd84c7b
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py --- a/admin/securedrop_admin/__init__.py +++ b/admin/securedrop_admin/__init__.py @@ -424,6 +424,9 @@ def __init__(self, args): def load_and_update_config(self, validate: bool = True, prompt: bool = True): if self.exists(): self.config = self.load(validate) + elif not prompt: + sdlog.error('Please run "securedrop-admin sdconfig" first.') + sys.exit(1) return self.update_config(prompt) @@ -624,6 +627,8 @@ def clean_config(self, config: dict) -> dict: ) raise clean_config[var] = transform(text) if transform else text + if var not in self._config_in_progress: + self._config_in_progress[var] = clean_config[var] return clean_config def load(self, validate=True):
diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py --- a/admin/tests/test_integration.py +++ b/admin/tests/test_integration.py @@ -5,6 +5,7 @@ import pytest import re import requests +import shutil import subprocess import tempfile @@ -191,6 +192,19 @@ def setup_function(function): global SD_DIR SD_DIR = tempfile.mkdtemp() ANSIBLE_BASE = '{0}/install_files/ansible-base'.format(SD_DIR) + + for name in ["roles", "tasks"]: + shutil.copytree( + os.path.join(CURRENT_DIR, "../../install_files/ansible-base", name), + os.path.join(ANSIBLE_BASE, name) + ) + + for name in ["ansible.cfg", "securedrop-prod.yml"]: + shutil.copy( + os.path.join(CURRENT_DIR, '../../install_files/ansible-base', name), + ANSIBLE_BASE + ) + cmd = 'mkdir -p {0}/group_vars/all'.format(ANSIBLE_BASE).split() subprocess.check_call(cmd) for name in ['sd_admin_test.pub', 'ca.crt', 'sd.crt', 'key.asc']: @@ -340,6 +354,29 @@ def verify_v3_onion_when_v2_is_enabled(child): assert ANSI_ESCAPE.sub('', child.buffer.decode("utf-8")).strip() == 'yes' # noqa: E501 +def verify_install_has_valid_config(): + """ + Checks that securedrop-admin install validates the configuration. + """ + cmd = os.path.join(os.path.dirname(CURRENT_DIR), 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} install'.format(cmd, SD_DIR)) + child.expect(b"SUDO password:", timeout=5) + child.close() + + +def test_install_with_no_config(): + """ + Checks that securedrop-admin install complains about a missing config file. + """ + cmd = os.path.join(os.path.dirname(CURRENT_DIR), 'securedrop_admin/__init__.py') + child = pexpect.spawn('python {0} --root {1} install'.format(cmd, SD_DIR)) + child.expect(b'ERROR: Please run "securedrop-admin sdconfig" first.', timeout=5) + child.expect(pexpect.EOF, timeout=5) + child.close() + assert child.exitstatus == 1 + assert child.signalstatus is None + + def test_sdconfig_on_first_run(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), 'securedrop_admin/__init__.py') @@ -401,6 +438,8 @@ def test_sdconfig_on_first_run(): data = fobj.read() assert data == OUTPUT1 + verify_install_has_valid_config() + def test_sdconfig_both_v2_v3_true(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), @@ -463,6 +502,8 @@ def test_sdconfig_both_v2_v3_true(): data = fobj.read() assert data == WHEN_BOTH_TRUE + verify_install_has_valid_config() + def test_sdconfig_only_v2_true(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), @@ -525,6 +566,8 @@ def test_sdconfig_only_v2_true(): data = fobj.read() assert data == WHEN_ONLY_V2 + verify_install_has_valid_config() + def test_sdconfig_enable_journalist_alerts(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), @@ -592,6 +635,8 @@ def test_sdconfig_enable_journalist_alerts(): data = fobj.read() assert JOURNALIST_ALERT_OUTPUT == data + verify_install_has_valid_config() + def test_sdconfig_enable_https_on_source_interface(): cmd = os.path.join(os.path.dirname(CURRENT_DIR), @@ -666,6 +711,8 @@ def test_sdconfig_enable_https_on_source_interface(): data = fobj.read() assert HTTPS_OUTPUT == data + verify_install_has_valid_config() + # The following is the minimal git configuration which can be used to fetch # from the SecureDrop Github repository. We want to use this because the
Install command fails on admin workstation if v2 services enabled, v3 services disabled. ## Description If the `site-specific` file on an Admin Workstation has `v2_onion_services: true` and `v3_onion_services: false`, running `./securedrop-admin install` fails with a validation error. ## Steps to Reproduce using an admin workstation USB: - ensure that the workstation code has been upgraded to 1.4.0 - run `./securedrop-admin sdconfig` and set v2 services on, v3 services off - run `./securedrop-admin install` ## Expected Behavior - Install playbook is run, prompting the user for the server-side admin password ## Actual Behavior - command fails with error `ERROR: Since you disabled v2 onion services, you must enable v3 onion services` ## Comments This issue is 1.4.0-specific; the 1.3.0-tagged version works correctly.
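A note on the mechanism at play in this record: the validator for `v3_onion_services` is cross-field — it needs to see the answer just given for `v2_onion_services` — but before the patch above, `clean_config` never recorded answers as they were collected, so the check ran against stale defaults and rejected a valid "v2 on, v3 off" configuration. Below is a minimal, hypothetical Python sketch of the pattern the patch introduces: record each cleaned answer in `_config_in_progress` immediately so later validators can read it. Only the `_config_in_progress` name comes from the patch; the class, methods, and error string are simplified stand-ins, not the real `SiteConfig` code.

```python
class SiteConfig:
    """Simplified stand-in for securedrop-admin's SiteConfig (illustrative only)."""

    def __init__(self):
        # Answers recorded as they are collected, so a later question's
        # validator can inspect an earlier answer in the same run.
        self._config_in_progress = {}

    def validate_v3(self, v3_enabled):
        # Cross-field check: v3 may only be disabled if v2 is enabled.
        # Without _config_in_progress this would read a stale default
        # (False) and wrongly raise even when v2 was just turned on.
        v2_enabled = self._config_in_progress.get('v2_onion_services', False)
        if not v2_enabled and not v3_enabled:
            raise ValueError('Since you disabled v2 onion services, '
                             'you must enable v3 onion services')

    def collect(self, var, value):
        if var == 'v3_onion_services':
            self.validate_v3(value)
        # Mirror the patch: remember the answer as soon as it is cleaned.
        self._config_in_progress[var] = value


config = SiteConfig()
config.collect('v2_onion_services', True)
config.collect('v3_onion_services', False)  # accepted: v2 is still enabled
print(config._config_in_progress)
```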
I can reproduce this. Investigating...
2020-06-23T19:53:30Z
[]
[]
freedomofpress/securedrop
5,345
freedomofpress__securedrop-5345
[ "5315" ]
141d405ff1dc73739724e7011b489d61e45bfc1f
diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -300,7 +300,7 @@ def single_reply(source_uuid, reply_uuid): def get_all_submissions(): submissions = Submission.query.all() return jsonify({'submissions': [submission.to_json() for - submission in submissions]}), 200 + submission in submissions if submission.source]}), 200 @api.route('/replies', methods=['GET']) @token_required diff --git a/securedrop/models.py b/securedrop/models.py --- a/securedrop/models.py +++ b/securedrop/models.py @@ -215,17 +215,17 @@ def __repr__(self) -> str: def to_json(self) -> 'Dict[str, Union[str, int, bool]]': json_submission = { 'source_url': url_for('api.single_source', - source_uuid=self.source.uuid), + source_uuid=self.source.uuid) if self.source else None, 'submission_url': url_for('api.single_submission', source_uuid=self.source.uuid, - submission_uuid=self.uuid), + submission_uuid=self.uuid) if self.source else None, 'filename': self.filename, 'size': self.size, 'is_read': self.downloaded, 'uuid': self.uuid, 'download_url': url_for('api.download_submission', source_uuid=self.source.uuid, - submission_uuid=self.uuid), + submission_uuid=self.uuid) if self.source else None, } return json_submission
diff --git a/securedrop/tests/test_journalist_api.py b/securedrop/tests/test_journalist_api.py --- a/securedrop/tests/test_journalist_api.py +++ b/securedrop/tests/test_journalist_api.py @@ -376,6 +376,20 @@ def test_authorized_user_can_get_all_submissions(journalist_app, assert observed_submissions == expected_submissions +def test_authorized_user_get_all_submissions_with_disconnected_submissions(journalist_app, + test_submissions, + journalist_api_token): + with journalist_app.test_client() as app: + db.session.execute( + "DELETE FROM sources WHERE id = :id", + {"id": test_submissions["source"].id} + ) + response = app.get(url_for('api.get_all_submissions'), + headers=get_api_headers(journalist_api_token)) + + assert response.status_code == 200 + + def test_authorized_user_get_source_submissions(journalist_app, test_submissions, journalist_api_token):
api: GET /api/v1/submissions produces 500 when there are disconnected submissions ## Description The API's submission endpoint should gracefully handle the case where there is a "disconnected" submission. ## Steps to Reproduce 0. `make dev` 1. docker exec into the container 2. `cd /var/lib/securedrop/` 3. `sqlite3 db.sqlite` 4. `delete from sources where id=1;` 5. While authenticated to the journalist API, `GET /api/v1/submissions` ## Expected Behavior OSSEC alert starts appearing indicating that there are disconnected submissions, encouraging the admin to clean up. ## Actual Behavior 500 occurs: ``` Traceback (most recent call last): File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2309, in __call__ return self.wsgi_app(environ, start_response) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2295, in wsgi_app response = self.handle_exception(e) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1741, in handle_exception reraise(exc_type, exc_value, tb) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise raise value File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 2292, in wsgi_app response = self.full_dispatch_request() File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1815, in full_dispatch_request rv = self.handle_user_exception(e) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1718, in handle_user_exception reraise(exc_type, exc_value, tb) File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/_compat.py", line 35, in reraise raise value File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1813, in full_dispatch_request rv = self.dispatch_request() File "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages/flask/app.py", line 1799, in dispatch_request return self.view_functions[rule.endpoint](**req.view_args) File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc1/securedrop/securedrop/journalist_app/api.py", line 48, in decorated_function return f(*args, **kwargs) File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc1/securedrop/securedrop/journalist_app/api.py", line 303, in get_all_submissions submission in submissions]}), 200 File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc1/securedrop/securedrop/journalist_app/api.py", line 303, in <listcomp> submission in submissions]}), 200 File "/Users/redshiftzero/Documents/Github/securedrop-1.2.0-rc1/securedrop/securedrop/models.py", line 218, in to_json source_uuid=self.source.uuid), AttributeError: 'NoneType' object has no attribute 'uuid' ``` ## Comments Bit of a weird STR, but the submissions endpoint producing a 500 is unfortunate.
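The guard pattern applied in this record's patch is worth seeing in isolation: every field of the serialized submission that dereferences `self.source` gets an `if self.source else None` fallback, so a "disconnected" submission (one whose parent source row was deleted) serializes cleanly instead of raising `AttributeError`. The sketch below is a toy model, not the real SQLAlchemy `Submission` class — the URL format and constructor are invented for illustration.

```python
from typing import Dict, Optional


class Submission:
    """Toy stand-in for the SQLAlchemy Submission model (illustrative only)."""

    def __init__(self, uuid: str, filename: str, source=None) -> None:
        self.uuid = uuid
        self.filename = filename
        # For a disconnected submission the parent source row is gone,
        # so the ORM relationship resolves to None.
        self.source = source

    def to_json(self) -> Dict[str, Optional[str]]:
        # Guard every source-derived field, as the patch does.
        return {
            'uuid': self.uuid,
            'filename': self.filename,
            'source_url': ('/api/v1/sources/%s' % self.source.uuid
                           if self.source else None),
        }


orphan = Submission(uuid='abc-123', filename='1-memo.gpg')
print(orphan.to_json())  # source_url is None instead of a crash
```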
Something like this might be reasonable but we should check how this interacts w/ the client sync logic and also add a regression test: ```diff diff --git a/securedrop/models.py b/securedrop/models.py index 763fd285f..3e9b05167 100644 --- a/securedrop/models.py +++ b/securedrop/models.py @@ -215,17 +215,17 @@ class Submission(db.Model): def to_json(self) -> 'Dict[str, Union[str, int, bool]]': json_submission = { 'source_url': url_for('api.single_source', - source_uuid=self.source.uuid), + source_uuid=self.source.uuid) if self.source else None, 'submission_url': url_for('api.single_submission', source_uuid=self.source.uuid, - submission_uuid=self.uuid), + submission_uuid=self.uuid) if self.source else None, 'filename': self.filename, 'size': self.size, 'is_read': self.downloaded, 'uuid': self.uuid, 'download_url': url_for('api.download_submission', source_uuid=self.source.uuid, - submission_uuid=self.uuid), + submission_uuid=self.uuid) if self.source else None, } return json_submission @@ -285,10 +285,10 @@ class Reply(db.Model): uuid = self.journalist.uuid json_submission = { 'source_url': url_for('api.single_source', - source_uuid=self.source.uuid), + source_uuid=self.source.uuid) if self.source else None, 'reply_url': url_for('api.single_reply', source_uuid=self.source.uuid, - reply_uuid=self.uuid), + reply_uuid=self.uuid) if self.source else None, 'filename': self.filename, 'size': self.size, 'journalist_username': username, ``` @redshiftzero Will work on this. @prateekj117 We've added this to our current sprint. If you're still interested and have time in the next couple of weeks, it's all yours. Let me know if I can help, or if that timeframe isn't convenient, we can take care of it. Thanks! @rmol Timeframe is convenient. Will make a PR by the weekend. Thanks.
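The expected behavior in this record also mentions surfacing disconnected submissions to the admin (via an OSSEC alert). As a hedged illustration of what such a check could look like, here is a sketch that lists submissions whose parent source row is missing. It is meant to be run on the app server where the reproduction steps locate the database, and the `source_id` column name is assumed from the ORM relationship rather than taken from this record.

```python
import sqlite3

# Hypothetical detection query for disconnected submissions. The database
# path comes from the reproduction steps; run this where that file exists.
conn = sqlite3.connect('/var/lib/securedrop/db.sqlite')
rows = conn.execute(
    """
    SELECT s.id, s.filename
    FROM submissions AS s
    LEFT JOIN sources AS src ON s.source_id = src.id
    WHERE src.id IS NULL
    """
).fetchall()
for submission_id, filename in rows:
    print('disconnected submission:', submission_id, filename)
conn.close()
```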
2020-06-26T18:03:39Z
[]
[]