diff --git a/securedrop/actions/__init__.py b/securedrop/actions/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/securedrop/actions/exceptions.py b/securedrop/actions/exceptions.py
new file mode 100644
index 0000000000..c901a5c9fc
--- /dev/null
+++ b/securedrop/actions/exceptions.py
@@ -0,0 +1,2 @@
+class NotFoundError(Exception):
+    pass
diff --git a/securedrop/actions/pagination.py b/securedrop/actions/pagination.py
new file mode 100644
index 0000000000..778469ae1c
--- /dev/null
+++ b/securedrop/actions/pagination.py
@@ -0,0 +1,35 @@
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from typing import List, Optional
+
+from sqlalchemy.orm import Query
+
+
+@dataclass(frozen=True)
+class PaginationConfig:
+    page_size: int
+    page_number: int
+
+    def __post_init__(self) -> None:
+        if self.page_size < 1:
+            raise ValueError("Received a page_size that's less than 1")
+        if self.page_number < 0:
+            raise ValueError("Received a page_number that's less than 0")
+
+
+class SupportsPagination(ABC):
+    @abstractmethod
+    def create_query(self) -> Query:
+        pass
+
+    def perform(self, paginate_results_with_config: Optional[PaginationConfig] = None) -> List:
+        query = self.create_query()
+
+        if paginate_results_with_config:
+            offset = (
+                paginate_results_with_config.page_size * paginate_results_with_config.page_number
+            )
+            limit = paginate_results_with_config.page_size
+            query = query.offset(offset).limit(limit)
+
+        return query.all()
diff --git a/securedrop/actions/sources_actions.py b/securedrop/actions/sources_actions.py
new file mode 100644
index 0000000000..18e8d7c5e9
--- /dev/null
+++ b/securedrop/actions/sources_actions.py
@@ -0,0 +1,134 @@
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from pathlib import Path
+from typing import Optional
+
+import models
+from actions.exceptions import NotFoundError
+from actions.pagination import SupportsPagination
+from encryption import EncryptionManager, GpgKeyNotFoundError
+from sqlalchemy.orm import Query, Session
+from store import Storage
+
+
+class SearchSourcesOrderByEnum(str, Enum):
+    # Not needed yet; only here if we ever need to order the results. For example:
+    # LAST_UPDATED_DESC = "LAST_UPDATED_DESC"
+    pass
+
+
+@dataclass(frozen=True)
+class SearchSourcesFilters:
+    # The default values for the filters below are meant to match the most common
+    # "use case" when searching sources. Also, using None means "don't enable this filter"
+    filter_by_is_pending: Optional[bool] = False
+    filter_by_is_starred: Optional[bool] = None
+    filter_by_is_deleted: Optional[bool] = False
+    filter_by_was_updated_after: Optional[datetime] = None
+
+
+class SearchSourcesAction(SupportsPagination):
+    def __init__(
+        self,
+        db_session: Session,
+        filters: SearchSourcesFilters = SearchSourcesFilters(),
+        order_by: Optional[SearchSourcesOrderByEnum] = None,
+    ):
+        self._db_session = db_session
+        self._filters = filters
+        if order_by:
+            raise NotImplementedError("The order_by argument is not implemented")
+
+    def create_query(self) -> Query:
+        query = self._db_session.query(models.Source)
+
+        if self._filters.filter_by_is_deleted is True:
+            query = query.filter(models.Source.deleted_at.isnot(None))
+        elif self._filters.filter_by_is_deleted is False:
+            query = query.filter(models.Source.deleted_at.is_(None))
+        else:
+            # filter_by_is_deleted is None; nothing to do
+            pass
+
+        if self._filters.filter_by_is_pending is not None:
+            query = query.filter_by(pending=self._filters.filter_by_is_pending)
+
+        if self._filters.filter_by_is_starred is not None:
+            query = query.filter(models.Source.is_starred == self._filters.filter_by_is_starred)
+
+        if self._filters.filter_by_was_updated_after is not None:
+            query = query.filter(
+                models.Source.last_updated > self._filters.filter_by_was_updated_after
+            )
+
+        if self._filters.filter_by_is_pending in [None, False]:
+            # Never return sources with a None last_updated field unless "pending" sources
+            # were explicitly requested
+            query = query.filter(models.Source.last_updated.isnot(None))
+
+        return query
+
+
+class GetSingleSourceAction:
+    def __init__(
+        self,
+        db_session: Session,
+        # The two arguments are mutually exclusive
+        filesystem_id: Optional[str] = None,
+        uuid: Optional[str] = None,
+    ) -> None:
+        self._db_session = db_session
+        if uuid and filesystem_id:
+            raise ValueError("uuid and filesystem_id are mutually exclusive")
+        if uuid is None and filesystem_id is None:
+            raise ValueError("At least one of uuid and filesystem_id must be supplied")
+
+        self._filesystem_id = filesystem_id
+        self._uuid = uuid
+
+    def perform(self) -> models.Source:
+        source: Optional[models.Source]
+        if self._uuid:
+            source = self._db_session.query(models.Source).filter_by(uuid=self._uuid).one_or_none()
+        elif self._filesystem_id:
+            source = (
+                self._db_session.query(models.Source)
+                .filter_by(filesystem_id=self._filesystem_id)
+                .one_or_none()
+            )
+        else:
+            raise ValueError("Should never happen")
+
+        if source is None:
+            raise NotFoundError()
+        else:
+            return source
+
+
+class DeleteSingleSourceAction:
+    """Delete a source and all of its submissions and GPG key."""
+
+    def __init__(
+        self,
+        db_session: Session,
+        source: models.Source,
+    ) -> None:
+        self._db_session = db_session
+        self._source = source
+
+    def perform(self) -> None:
+        # Delete the source's collection of submissions
+        path = Path(Storage.get_default().path(self._source.filesystem_id))
+        if path.exists():
+            Storage.get_default().move_to_shredder(path.as_posix())
+
+        # Delete the source's reply keypair, if it exists
+        try:
+            EncryptionManager.get_default().delete_source_key_pair(self._source.filesystem_id)
+        except GpgKeyNotFoundError:
+            pass
+
+        # Delete their entry in the db
+        self._db_session.delete(self._source)
+        self._db_session.commit()
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
index 07757a2976..154e41eb5c 100644
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,7 +1,8 @@ +from actions.sources_actions import SearchSourcesAction +from db import db from encryption import EncryptionManager, GpgKeyNotFoundError from execution import asynchronous from journalist_app import create_app -from models import Source from sdconfig import SecureDropConfig config = SecureDropConfig.get_current() @@ -14,7 +15,8 @@ def prime_keycache() -> None: """Pre-load the source public keys into Redis.""" with app.app_context(): encryption_mgr = EncryptionManager.get_default() - for source in Source.query.filter_by(pending=False, deleted_at=None).all(): + all_sources = SearchSourcesAction(db_session=db.session).perform() + for source in all_sources: try: encryption_mgr.get_source_public_key(source.filesystem_id) except GpgKeyNotFoundError: diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py index 1730684b2d..a9e9041776 100644 --- a/securedrop/journalist_app/__init__.py +++ b/securedrop/journalist_app/__init__.py @@ -5,13 +5,13 @@ import i18n import template_filters import version +from actions.exceptions import NotFoundError from db import db from flask import Flask, abort, g, json, redirect, render_template, request, url_for from flask_babel import gettext from flask_wtf.csrf import CSRFError, CSRFProtect from journalist_app import account, admin, api, col, main from journalist_app.sessions import Session, session -from journalist_app.utils import get_source from models import InstanceConfig from sdconfig import SecureDropConfig from werkzeug import Response @@ -72,6 +72,11 @@ def handle_csrf_error(e: CSRFError) -> "Response": session.destroy(("error", msg), session.get("locale")) return redirect(url_for("main.login")) + # Convert a NotFoundError raised by an action into a 404 + @app.errorhandler(NotFoundError) + def handle_action_raised_not_found(e: NotFoundError) -> None: + abort(404) + def _handle_http_exception( error: HTTPException, ) -> Tuple[Union[Response, str], Optional[int]]: @@ -132,7 +137,6 @@ def setup_g() -> Optional[Response]: filesystem_id = request.form.get("filesystem_id") if filesystem_id: g.filesystem_id = filesystem_id # pylint: disable=assigning-non-slot - g.source = get_source(filesystem_id) # pylint: disable=assigning-non-slot return None diff --git a/securedrop/journalist_app/api.py b/securedrop/journalist_app/api.py index fbd084f624..7258a5c25f 100644 --- a/securedrop/journalist_app/api.py +++ b/securedrop/journalist_app/api.py @@ -7,6 +7,12 @@ import flask import werkzeug +from actions.exceptions import NotFoundError +from actions.sources_actions import ( + DeleteSingleSourceAction, + GetSingleSourceAction, + SearchSourcesAction, +) from db import db from flask import Blueprint, abort, jsonify, request from journalist_app import utils @@ -17,7 +23,6 @@ LoginThrottledException, Reply, SeenReply, - Source, Submission, WrongPasswordException, ) @@ -121,31 +126,31 @@ def get_token() -> Tuple[flask.Response, int]: @api.route("/sources", methods=["GET"]) def get_all_sources() -> Tuple[flask.Response, int]: - sources = Source.query.filter_by(pending=False, deleted_at=None).all() + sources = SearchSourcesAction(db_session=db.session).perform() return jsonify({"sources": [source.to_json() for source in sources]}), 200 @api.route("/sources/", methods=["GET", "DELETE"]) def single_source(source_uuid: str) -> Tuple[flask.Response, int]: if request.method == "GET": - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() return 
jsonify(source.to_json()), 200 elif request.method == "DELETE": - source = get_or_404(Source, source_uuid, column=Source.uuid) - utils.delete_collection(source.filesystem_id) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() + DeleteSingleSourceAction(db_session=db.session, source=source).perform() return jsonify({"message": "Source and submissions deleted"}), 200 else: abort(405) @api.route("/sources//add_star", methods=["POST"]) def add_star(source_uuid: str) -> Tuple[flask.Response, int]: - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() utils.make_star_true(source.filesystem_id) db.session.commit() return jsonify({"message": "Star added"}), 201 @api.route("/sources//remove_star", methods=["DELETE"]) def remove_star(source_uuid: str) -> Tuple[flask.Response, int]: - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() utils.make_star_false(source.filesystem_id) db.session.commit() return jsonify({"message": "Star removed"}), 200 @@ -160,15 +165,15 @@ def flag(source_uuid: str) -> Tuple[flask.Response, int]: @api.route("/sources//conversation", methods=["DELETE"]) def source_conversation(source_uuid: str) -> Tuple[flask.Response, int]: if request.method == "DELETE": - source = get_or_404(Source, source_uuid, column=Source.uuid) - utils.delete_source_files(source.filesystem_id) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() + utils.delete_source_files(source) return jsonify({"message": "Source data deleted"}), 200 else: abort(405) @api.route("/sources//submissions", methods=["GET"]) def all_source_submissions(source_uuid: str) -> Tuple[flask.Response, int]: - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() return ( jsonify({"submissions": [submission.to_json() for submission in source.submissions]}), 200, @@ -176,13 +181,13 @@ def all_source_submissions(source_uuid: str) -> Tuple[flask.Response, int]: @api.route("/sources//submissions//download", methods=["GET"]) def download_submission(source_uuid: str, submission_uuid: str) -> flask.Response: - get_or_404(Source, source_uuid, column=Source.uuid) + GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) return utils.serve_file_with_etag(submission) @api.route("/sources//replies//download", methods=["GET"]) def download_reply(source_uuid: str, reply_uuid: str) -> flask.Response: - get_or_404(Source, source_uuid, column=Source.uuid) + GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) return utils.serve_file_with_etag(reply) @@ -193,11 +198,11 @@ def download_reply(source_uuid: str, reply_uuid: str) -> flask.Response: ) def single_submission(source_uuid: str, submission_uuid: str) -> Tuple[flask.Response, int]: if request.method == "GET": - get_or_404(Source, source_uuid, column=Source.uuid) + GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) return jsonify(submission.to_json()), 200 elif request.method == "DELETE": - get_or_404(Source, source_uuid, column=Source.uuid) + GetSingleSourceAction(db_session=db.session, 
uuid=source_uuid).perform() submission = get_or_404(Submission, submission_uuid, column=Submission.uuid) utils.delete_file_object(submission) return jsonify({"message": "Submission deleted"}), 200 @@ -207,13 +212,13 @@ def single_submission(source_uuid: str, submission_uuid: str) -> Tuple[flask.Res @api.route("/sources//replies", methods=["GET", "POST"]) def all_source_replies(source_uuid: str) -> Tuple[flask.Response, int]: if request.method == "GET": - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() return ( jsonify({"replies": [reply.to_json() for reply in source.replies]}), 200, ) elif request.method == "POST": - source = get_or_404(Source, source_uuid, column=Source.uuid) + source = GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() if request.json is None: abort(400, "please send requests in valid JSON") @@ -277,7 +282,7 @@ def all_source_replies(source_uuid: str) -> Tuple[flask.Response, int]: @api.route("/sources//replies/", methods=["GET", "DELETE"]) def single_reply(source_uuid: str, reply_uuid: str) -> Tuple[flask.Response, int]: - get_or_404(Source, source_uuid, column=Source.uuid) + GetSingleSourceAction(db_session=db.session, uuid=source_uuid).perform() reply = get_or_404(Reply, reply_uuid, column=Reply.uuid) if request.method == "GET": return jsonify(reply.to_json()), 200 @@ -364,6 +369,10 @@ def logout() -> Tuple[flask.Response, int]: session.destroy() return jsonify({"message": "Your token has been revoked."}), 200 + @api.errorhandler(NotFoundError) + def handle_not_found_error(e: NotFoundError) -> Tuple[flask.Response, int]: + return jsonify({"message": "Not Found."}), 404 + def _handle_api_http_exception( error: werkzeug.exceptions.HTTPException, ) -> Tuple[flask.Response, int]: diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py index 773eb9a580..94da2108ab 100644 --- a/securedrop/journalist_app/col.py +++ b/securedrop/journalist_app/col.py @@ -1,6 +1,7 @@ from pathlib import Path import werkzeug +from actions.sources_actions import DeleteSingleSourceAction, GetSingleSourceAction from db import db from encryption import EncryptionManager, GpgKeyNotFoundError from flask import ( @@ -26,8 +27,6 @@ col_download_unread, col_star, col_un_star, - delete_collection, - get_source, make_star_false, make_star_true, mark_seen, @@ -55,7 +54,7 @@ def remove_star(filesystem_id: str) -> werkzeug.Response: @view.route("/") def col(filesystem_id: str) -> str: form = ReplyForm() - source = get_source(filesystem_id) + source = GetSingleSourceAction(db_session=db.session, filesystem_id=filesystem_id).perform() try: EncryptionManager.get_default().get_source_public_key(filesystem_id) source.has_key = True @@ -67,12 +66,9 @@ def col(filesystem_id: str) -> str: @view.route("/delete/", methods=("POST",)) def delete_single(filesystem_id: str) -> werkzeug.Response: """deleting a single collection from its /col page""" - source = get_source(filesystem_id) - try: - delete_collection(filesystem_id) - except GpgKeyNotFoundError as e: - current_app.logger.error("error deleting collection: %s", e) - abort(500) + source = GetSingleSourceAction(db_session=db.session, filesystem_id=filesystem_id).perform() + source_designation = source.journalist_designation + DeleteSingleSourceAction(db_session=db.session, source=source).perform() flash( Markup( @@ -80,8 +76,9 @@ def delete_single(filesystem_id: str) -> werkzeug.Response: # Translators: Precedes a message 
confirming the success of an operation. escape(gettext("Success!")), escape( - gettext("The account and data for the source {} have been deleted.").format( - source.journalist_designation + gettext( + f"The account and data for the source {source_designation}" + f" have been deleted." ) ), ) diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py index 58976bf98a..ab7d14fc7a 100644 --- a/securedrop/journalist_app/main.py +++ b/securedrop/journalist_app/main.py @@ -4,6 +4,7 @@ import store import werkzeug +from actions.sources_actions import GetSingleSourceAction, SearchSourcesAction from db import db from encryption import EncryptionManager from flask import ( @@ -22,9 +23,8 @@ from flask_babel import gettext from journalist_app.forms import ReplyForm from journalist_app.sessions import session -from journalist_app.utils import bulk_delete, download, get_source, validate_user -from models import Reply, SeenReply, Source, SourceStar, Submission -from sqlalchemy.orm import joinedload +from journalist_app.utils import bulk_delete, download, validate_user +from models import Reply, SeenReply, Submission from sqlalchemy.sql import func from store import Storage @@ -65,57 +65,30 @@ def logout() -> werkzeug.Response: @view.route("/") def index() -> str: - # Gather the count of unread submissions for each source - # ID. This query will be joined in the queries for starred and - # unstarred sources below, and the unread counts added to - # their result sets as an extra column. - unread_stmt = ( - db.session.query(Submission.source_id, func.count("*").label("num_unread")) - .filter_by(seen_files=None, seen_messages=None) + # Fetch all sources + all_sources = SearchSourcesAction(db_session=db.session).perform() + + # Add "num_unread" attributes to the source entities + # First gather the count of unread submissions for each source ID + # TODO(AD): Remove this and add support for pagination; switch to a (hybrid?) property + unread_submission_counts_results = ( + db.session.query(Submission.source_id, func.count("*")) + .filter_by(downloaded=False) .group_by(Submission.source_id) - .subquery() - ) - - # Query for starred sources, along with their unread - # submission counts. - starred = ( - db.session.query(Source, unread_stmt.c.num_unread) - .filter_by(pending=False, deleted_at=None) - .filter(Source.last_updated.isnot(None)) - .filter(SourceStar.starred.is_(True)) - .outerjoin(SourceStar) - .options(joinedload(Source.submissions)) - .options(joinedload(Source.star)) - .outerjoin(unread_stmt, Source.id == unread_stmt.c.source_id) - .order_by(Source.last_updated.desc()) .all() ) - - # Now, add "num_unread" attributes to the source entities. - for source, num_unread in starred: - source.num_unread = num_unread or 0 - starred = [source for source, num_unread in starred] - - # Query for sources without stars, along with their unread - # submission counts. - unstarred = ( - db.session.query(Source, unread_stmt.c.num_unread) - .filter_by(pending=False, deleted_at=None) - .filter(Source.last_updated.isnot(None)) - .filter(~Source.star.has(SourceStar.starred.is_(True))) - .options(joinedload(Source.submissions)) - .options(joinedload(Source.star)) - .outerjoin(unread_stmt, Source.id == unread_stmt.c.source_id) - .order_by(Source.last_updated.desc()) - .all() + source_ids_to_unread_submission_counts = { + source_id: subs_count for source_id, subs_count in unread_submission_counts_results + } + # TODO(AD): Remove this dynamically-added attribute; switch to a (hybrid?) 
property + for source in all_sources: + source.num_unread = source_ids_to_unread_submission_counts.get(source.id, 0) + + response = render_template( + "index.html", + unstarred=[src for src in all_sources if not src.is_starred], + starred=[src for src in all_sources if src.is_starred], ) - - # Again, add "num_unread" attributes to the source entities. - for source, num_unread in unstarred: - source.num_unread = num_unread or 0 - unstarred = [source for source, num_unread in unstarred] - - response = render_template("index.html", unstarred=unstarred, starred=starred) return response @view.route("/reply", methods=("POST",)) @@ -139,10 +112,11 @@ def reply() -> werkzeug.Response: flash(error, "error") return redirect(url_for("col.col", filesystem_id=g.filesystem_id)) - g.source.interaction_count += 1 - filename = "{}-{}-reply.gpg".format( - g.source.interaction_count, g.source.journalist_filename - ) + source = GetSingleSourceAction( + db_session=db.session, filesystem_id=g.filesystem_id + ).perform() + source.interaction_count += 1 + filename = "{}-{}-reply.gpg".format(source.interaction_count, source.journalist_filename) EncryptionManager.get_default().encrypt_journalist_reply( for_source_with_filesystem_id=g.filesystem_id, reply_in=form.message.data, @@ -150,7 +124,7 @@ def reply() -> werkzeug.Response: ) try: - reply = Reply(session.get_user(), g.source, filename, Storage.get_default()) + reply = Reply(session.get_user(), source, filename, Storage.get_default()) db.session.add(reply) seen_reply = SeenReply(reply=reply, journalist=session.get_user()) db.session.add(seen_reply) @@ -191,7 +165,12 @@ def bulk() -> Union[str, werkzeug.Response]: action = request.form["action"] error_redirect = url_for("col.col", filesystem_id=g.filesystem_id) doc_names_selected = request.form.getlist("doc_names_selected") - selected_docs = [doc for doc in g.source.collection if doc.filename in doc_names_selected] + + source = GetSingleSourceAction( + db_session=db.session, filesystem_id=g.filesystem_id + ).perform() + selected_docs = [doc for doc in source.collection if doc.filename in doc_names_selected] + if selected_docs == []: if action == "download": flash( @@ -221,7 +200,9 @@ def bulk() -> Union[str, werkzeug.Response]: return redirect(error_redirect) if action == "download": - source = get_source(g.filesystem_id) + source = GetSingleSourceAction( + db_session=db.session, filesystem_id=g.filesystem_id + ).perform() return download( source.journalist_filename, selected_docs, @@ -234,16 +215,17 @@ def bulk() -> Union[str, werkzeug.Response]: @view.route("/download_unread/") def download_unread_filesystem_id(filesystem_id: str) -> werkzeug.Response: + source = GetSingleSourceAction(db_session=db.session, filesystem_id=filesystem_id).perform() + unseen_submissions = ( - Submission.query.join(Source) - .filter(Source.deleted_at.is_(None), Source.filesystem_id == filesystem_id) + Submission.query.filter(Submission.source_id == source.id) .filter(~Submission.seen_files.any(), ~Submission.seen_messages.any()) .all() ) if len(unseen_submissions) == 0: flash(gettext("No unread submissions for this source."), "error") return redirect(url_for("col.col", filesystem_id=filesystem_id)) - source = get_source(filesystem_id) + return download(source.journalist_filename, unseen_submissions) return view diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py index 24989c5775..30b1d3b61f 100644 --- a/securedrop/journalist_app/utils.py +++ b/securedrop/journalist_app/utils.py @@ -1,12 +1,18 @@ 
import binascii -import os from datetime import datetime, timezone from typing import List, Optional, Union import flask +import models import werkzeug +from actions.exceptions import NotFoundError +from actions.sources_actions import ( + DeleteSingleSourceAction, + GetSingleSourceAction, + SearchSourcesAction, + SearchSourcesFilters, +) from db import db -from encryption import EncryptionManager, GpgKeyNotFoundError from flask import Markup, abort, current_app, escape, flash, redirect, send_file, url_for from flask_babel import gettext, ngettext from journalist_app.sessions import session @@ -48,21 +54,6 @@ def commit_account_changes(user: Journalist) -> None: flash(gettext("Account updated."), "success") -def get_source(filesystem_id: str, include_deleted: bool = False) -> Source: - """ - Return the Source object with `filesystem_id` - - If `include_deleted` is False, only sources with a null `deleted_at` will - be returned. - """ - query = Source.query.filter(Source.filesystem_id == filesystem_id) - if not include_deleted: - query = query.filter_by(deleted_at=None) - source = get_one_or_else(query, current_app.logger, abort) - - return source - - def validate_user( username: str, password: Optional[str], @@ -280,8 +271,10 @@ def bulk_delete( return redirect(url_for("col.col", filesystem_id=filesystem_id)) +# TODO(AD): Turn the next two functions into a SourceUpdateStarredAction def make_star_true(filesystem_id: str) -> None: - source = get_source(filesystem_id) + query = Source.query.filter(Source.filesystem_id == filesystem_id) + source = get_one_or_else(query, current_app.logger, abort) if source.star: source.star.starred = True else: @@ -290,7 +283,8 @@ def make_star_true(filesystem_id: str) -> None: def make_star_false(filesystem_id: str) -> None: - source = get_source(filesystem_id) + query = Source.query.filter(Source.filesystem_id == filesystem_id) + source = get_one_or_else(query, current_app.logger, abort) if not source.star: source_star = SourceStar(source) db.session.add(source_star) @@ -346,16 +340,14 @@ def col_delete(cols_selected: List[str]) -> werkzeug.Response: return redirect(url_for("main.index")) -def delete_source_files(filesystem_id: str) -> None: +def delete_source_files(source: models.Source) -> None: """deletes submissions and replies for specified source""" - source = get_source(filesystem_id, include_deleted=True) - if source is not None: - # queue all files for deletion and remove them from the database - for f in source.collection: - try: - delete_file_object(f) - except Exception: - pass + # queue all files for deletion and remove them from the database + for f in source.collection: + try: + delete_file_object(f) + except Exception: + pass def col_delete_data(cols_selected: List[str]) -> werkzeug.Response: @@ -374,7 +366,14 @@ def col_delete_data(cols_selected: List[str]) -> werkzeug.Response: else: for filesystem_id in cols_selected: - delete_source_files(filesystem_id) + try: + source = GetSingleSourceAction( + db_session=db.session, filesystem_id=filesystem_id + ).perform() + except NotFoundError: + pass + else: + delete_source_files(source) flash( Markup( @@ -390,37 +389,19 @@ def col_delete_data(cols_selected: List[str]) -> werkzeug.Response: return redirect(url_for("main.index")) -def delete_collection(filesystem_id: str) -> None: - """deletes source account including files and reply key""" - # Delete the source's collection of submissions - path = Storage.get_default().path(filesystem_id) - if os.path.exists(path): - 
Storage.get_default().move_to_shredder(path) - - # Delete the source's reply keypair, if it exists - try: - EncryptionManager.get_default().delete_source_key_pair(filesystem_id) - except GpgKeyNotFoundError: - pass - - # Delete their entry in the db - source = get_source(filesystem_id, include_deleted=True) - db.session.delete(source) - db.session.commit() - - def purge_deleted_sources() -> None: - """ - Deletes all Sources with a non-null `deleted_at` attribute. - """ - sources = Source.query.filter(Source.deleted_at.isnot(None)).order_by(Source.deleted_at).all() - if sources: - current_app.logger.info("Purging deleted sources (%s)", len(sources)) - for source in sources: + """Deletes all Sources with a non-null `deleted_at` attribute.""" + all_sources_to_delete = SearchSourcesAction( + db_session=db.session, filters=SearchSourcesFilters(filter_by_is_deleted=True) + ).perform() + if all_sources_to_delete: + current_app.logger.info(f"Purging deleted sources {len(all_sources_to_delete)}") + + for source in all_sources_to_delete: try: - delete_collection(source.filesystem_id) - except Exception as e: - current_app.logger.error("Error deleting source %s: %s", source.uuid, e) + DeleteSingleSourceAction(db_session=db.session, source=source).perform() + except Exception: + current_app.logger.exception(f"Error deleting source {source.uuid}") def set_name(user: Journalist, first_name: Optional[str], last_name: Optional[str]) -> None: @@ -496,10 +477,9 @@ def set_diceware_password( return True +# TODO(AD): This is only used once; move this to the file where it's used def col_download_unread(cols_selected: List[str]) -> werkzeug.Response: - """ - Download all unseen submissions from all selected sources. - """ + """Download all unseen submissions from all selected sources.""" unseen_submissions = ( Submission.query.join(Source) .filter(Source.deleted_at.is_(None), Source.filesystem_id.in_(cols_selected)) @@ -514,18 +494,16 @@ def col_download_unread(cols_selected: List[str]) -> werkzeug.Response: return download("unread", unseen_submissions) +# TODO(AD): This is only used once; move this to the file where it's used def col_download_all(cols_selected: List[str]) -> werkzeug.Response: """Download all submissions from all selected sources.""" - submissions = [] # type: List[Union[Source, Submission]] - for filesystem_id in cols_selected: - id = ( - Source.query.filter(Source.filesystem_id == filesystem_id) - .filter_by(deleted_at=None) - .one() - .id - ) - submissions += Submission.query.filter(Submission.source_id == id).all() - return download("all", submissions) + all_submissions = ( + Submission.query.join(Source) + .filter(Source.deleted_at.is_(None), Source.filesystem_id.in_(cols_selected)) + .all() + ) + + return download("all", all_submissions) def serve_file_with_etag(db_obj: Union[Reply, Submission]) -> flask.Response: diff --git a/securedrop/management/submissions.py b/securedrop/management/submissions.py index 7db0149ac9..ab196aad34 100644 --- a/securedrop/management/submissions.py +++ b/securedrop/management/submissions.py @@ -6,10 +6,11 @@ from argparse import _SubParsersAction from typing import List, Optional +from actions.sources_actions import SearchSourcesAction, SearchSourcesFilters from db import db from flask.ctx import AppContext from management import app_context -from models import Reply, Source, Submission +from models import Reply, Submission from rm import secure_delete @@ -178,14 +179,21 @@ def were_there_submissions_today( args: argparse.Namespace, context: 
Optional[AppContext] = None ) -> None: with context or app_context(): - something = ( - db.session.query(Source) - .filter(Source.last_updated > datetime.datetime.utcnow() - datetime.timedelta(hours=24)) - .count() - > 0 + source_updated_today = ( + SearchSourcesAction( + db_session=db.session, + filters=SearchSourcesFilters( + filter_by_was_updated_after=datetime.datetime.utcnow() + - datetime.timedelta(hours=24) + ), + ) + .create_query() + .first() ) + was_one_source_updated_today = source_updated_today is not None + count_file = os.path.join(args.data_root, "submissions_today.txt") - open(count_file, "w").write(something and "1" or "0") + open(count_file, "w").write(was_one_source_updated_today and "1" or "0") def add_check_db_disconnect_parser(subps: _SubParsersAction) -> None: diff --git a/securedrop/models.py b/securedrop/models.py index cd4f0c15e1..ec2cc3d29d 100644 --- a/securedrop/models.py +++ b/securedrop/models.py @@ -22,10 +22,22 @@ from flask_babel import gettext, ngettext from markupsafe import Markup from passphrases import PassphraseGenerator -from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, LargeBinary, String +from sqlalchemy import ( + Boolean, + Column, + DateTime, + ForeignKey, + Integer, + LargeBinary, + String, + and_, + exists, +) from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import Query, backref, relationship from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound +from sqlalchemy.sql import ColumnElement from store import Storage _default_instance_config: Optional["InstanceConfig"] = None @@ -58,11 +70,15 @@ class Source(db.Model): uuid = Column(String(36), unique=True, nullable=False) filesystem_id = Column(String(96), unique=True, nullable=False) journalist_designation = Column(String(255), nullable=False) + + # TODO(AD): last_updated is meant to be None until the source submits something (#3862). + # It being None is essentially the same as the "pending" field. Having two ways to track the + # same thing adds complexity/confusion. I think we should make last_updated non-nullable last_updated = Column(DateTime) + star = relationship("SourceStar", uselist=False, backref="source") - # sources are "pending" and don't get displayed to journalists until they - # submit something + # sources are "pending" and don't get displayed to journalists until they submit something pending = Column(Boolean, default=True) # keep track of how many interactions have happened, for filenames @@ -99,7 +115,7 @@ def documents_messages_count(self) -> "Dict[str, int]": def collection(self) -> "List[Union[Submission, Reply]]": """Return the list of submissions and replies for this source, sorted in ascending order by the filename/interaction count.""" - collection = [] # type: List[Union[Submission, Reply]] + collection: List[Union[Submission, Reply]] = [] collection.extend(self.submissions) collection.extend(self.replies) collection.sort(key=lambda x: int(x.filename.split("-")[0])) @@ -119,6 +135,19 @@ def public_key(self) -> "Optional[str]": except GpgKeyNotFoundError: return None + # Getting rid of SourceStar and using just a regular Source.is_starred boolean column would + # be a lot simpler... 
+ @hybrid_property + def is_starred(self) -> bool: + if self.star and self.star.starred: + return True + else: + return False + + @is_starred.expression # type: ignore + def is_starred(self) -> ColumnElement: + return exists().where(and_(self.id == SourceStar.source_id, SourceStar.starred.is_(True))) + def to_json(self) -> "Dict[str, object]": docs_msg_count = self.documents_messages_count() @@ -127,17 +156,12 @@ def to_json(self) -> "Dict[str, object]": else: last_updated = datetime.datetime.now(tz=datetime.timezone.utc) - if self.star and self.star.starred: - starred = True - else: - starred = False - json_source = { "uuid": self.uuid, "url": url_for("api.single_source", source_uuid=self.uuid), "journalist_designation": self.journalist_designation, "is_flagged": False, - "is_starred": starred, + "is_starred": self.is_starred, "last_updated": last_updated, "interaction_count": self.interaction_count, "key": { @@ -219,7 +243,7 @@ def to_json(self) -> "Dict[str, Any]": "size": self.size, "is_file": self.is_file, "is_message": self.is_message, - "is_read": self.seen, + "is_read": self.downloaded, "uuid": self.uuid, "download_url": url_for( "api.download_submission", @@ -232,17 +256,6 @@ def to_json(self) -> "Dict[str, Any]": } return json_submission - @property - def seen(self) -> bool: - """ - If the submission has been downloaded or seen by any journalist, then the submission is - considered seen. - """ - if self.downloaded or self.seen_files.count() or self.seen_messages.count(): - return True - - return False - class Reply(db.Model): __tablename__ = "replies" diff --git a/securedrop/tests/actions/__init__.py b/securedrop/tests/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/securedrop/tests/actions/test_sources_actions.py b/securedrop/tests/actions/test_sources_actions.py new file mode 100644 index 0000000000..5e70c844ba --- /dev/null +++ b/securedrop/tests/actions/test_sources_actions.py @@ -0,0 +1,233 @@ +from datetime import datetime +from pathlib import Path +from typing import List + +import models +import pytest +from actions.pagination import PaginationConfig +from actions.sources_actions import ( + DeleteSingleSourceAction, + SearchSourcesAction, + SearchSourcesFilters, +) +from encryption import EncryptionManager, GpgKeyNotFoundError +from journalist_app.utils import mark_seen +from tests import utils +from tests.factories.models_factories import SourceFactory +from tests.functional.db_session import get_database_session + + +@pytest.fixture +def app_db_session(journalist_app): + with get_database_session(journalist_app.config["SQLALCHEMY_DATABASE_URI"]) as db_session: + yield db_session + + +class TestSearchSourcesAction: + def test(self, app_db_session, app_storage): + # Given an app with some sources in different states + some_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=3, pending=False + ) + # Including some sources that haven't submitted anything yet + SourceFactory.create_batch(app_db_session, app_storage, records_count=4, pending=True) + # And some sources that were deleted + SourceFactory.create_batch( + app_db_session, app_storage, records_count=3, deleted_at=datetime.utcnow() + ) + + # When searching for all sources, then it succeeds + returned_sources = SearchSourcesAction(db_session=app_db_session).perform() + + # And only non-deleted and non-pending sources were returned by default + expected_source_ids = {src.id for src in some_sources} + assert {src.id for src in returned_sources} == 
expected_source_ids + + def test_filter_by_is_pending(self, app_db_session, app_storage): + # Given an app with sources that are pending + created_pending_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=4, pending=True + ) + + # And some other sources that are NOT pending + created_non_pending_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=3, pending=False + ) + + # When searching for all pending sources, then it succeeds + returned_pending_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_pending=True), + ).perform() + + # And the expected sources were returned + assert {src.id for src in returned_pending_sources} == { + src.id for src in created_pending_sources + } + + # And when searching for all NON-pending sources, then it succeeds + returned_non_pending_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_pending=False), + ).perform() + + # And the expected sources were returned + assert {src.id for src in returned_non_pending_sources} == { + src.id for src in created_non_pending_sources + } + + def test_filter_by_is_starred(self, app_db_session, app_storage): + # Given an app with sources that are starred + created_starred_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=4, is_starred=True + ) + # And some other sources that have never been starred + created_non_starred_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=3, is_starred=False + ) + # And some other sources that were starred and then un-starred + created_starred_then_un_starred_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=2, is_starred=True + ) + for src in created_starred_then_un_starred_sources: + src.star.starred = False + app_db_session.commit() + + # When searching for all starred sources, then it succeeds + returned_starred_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_starred=True), + ).perform() + + # And the expected sources were returned + assert {src.id for src in returned_starred_sources} == { + src.id for src in created_starred_sources + } + + # And when searching for all NON-starred sources, then it succeeds + returned_non_starred_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_starred=False), + ).perform() + + # And the expected sources were returned + expected_non_starred_source_ids = set() + expected_non_starred_source_ids.update( + {src.id for src in created_starred_then_un_starred_sources} + ) + expected_non_starred_source_ids.update({src.id for src in created_non_starred_sources}) + assert expected_non_starred_source_ids == {src.id for src in returned_non_starred_sources} + + def test_filter_by_is_deleted(self, app_db_session, app_storage): + # Given an app with sources that are deleted + created_deleted_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=4, deleted_at=datetime.utcnow() + ) + + # And some other sources that are NOT deleted + created_non_deleted_sources = SourceFactory.create_batch( + app_db_session, app_storage, records_count=3, deleted_at=None + ) + + # When searching for all deleted sources, then it succeeds + returned_deleted_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_deleted=True), + ).perform() + + # 
And the expected sources were returned + assert {src.id for src in returned_deleted_sources} == { + src.id for src in created_deleted_sources + } + + # And when searching for all NON-deleted sources, then it succeeds + returned_non_pending_sources = SearchSourcesAction( + db_session=app_db_session, + filters=SearchSourcesFilters(filter_by_is_deleted=False), + ).perform() + + # And the expected sources were returned + assert {src.id for src in returned_non_pending_sources} == { + src.id for src in created_non_deleted_sources + } + + def test_paginate_results_with_config(self, app_db_session, app_storage): + # Given an app with some sources + all_sources = SourceFactory.create_batch(app_db_session, app_storage, records_count=10) + + # When searching for the first page of all sources, then it succeeds + first_page_of_sources = SearchSourcesAction(db_session=app_db_session).perform( + paginate_results_with_config=PaginationConfig(page_number=0, page_size=3) + ) + + # And the expected sources were returned + expected_source_ids = {src.id for src in all_sources[0:3]} + assert {src.id for src in first_page_of_sources} == expected_source_ids + + # And when searching for the second page of all sources, then it succeeds + second_page_of_sources = SearchSourcesAction(db_session=app_db_session).perform( + paginate_results_with_config=PaginationConfig(page_number=1, page_size=3) + ) + + # And the expected sources were returned + expected_source_ids = {src.id for src in all_sources[3:6]} + assert {src.id for src in second_page_of_sources} == expected_source_ids + + +class TestDeleteSingleSourceAction: + def test(self, app_db_session, app_storage, test_journo): + # Given an app with some sources including one source to delete + source = SourceFactory.create(app_db_session, app_storage) + SourceFactory.create(app_db_session, app_storage) + + # And the source has an encryption key + encryption_mgr = EncryptionManager.get_default() + assert encryption_mgr.get_source_key_fingerprint(source.filesystem_id) + + # And the source has some submissions, messages and replies + journo = test_journo["journalist"] + all_submissions = utils.db_helper.submit(app_storage, source, 2, submission_type="file") + all_messages = utils.db_helper.submit(app_storage, source, 2, submission_type="message") + all_replies = utils.db_helper.reply(app_storage, journo, source, 2) + assert app_db_session.query(models.Submission).filter_by(source_id=source.id).all() + assert app_db_session.query(models.Reply).filter_by(source_id=source.id).all() + + # And the submissions, messages and replies have been seen + mark_seen(all_submissions, journo) + mark_seen(all_messages, journo) + mark_seen(all_replies, journo) + assert app_db_session.query(models.SeenFile).filter_by(journalist_id=journo.id).all() + assert app_db_session.query(models.SeenMessage).filter_by(journalist_id=journo.id).all() + assert app_db_session.query(models.SeenReply).filter_by(journalist_id=journo.id).all() + + # And the submissions point to actual files + all_submissions_file_paths: List[Path] = [ + Path(app_storage.path(source.filesystem_id, submission.filename)) + for submission in all_submissions + ] + assert all_submissions_file_paths + for file_path in all_submissions_file_paths: + assert file_path.exists() + + # When deleting the source, then it succeeds + DeleteSingleSourceAction(db_session=app_db_session, source=source).perform() + + # And the source's DB record was deleted + assert app_db_session.query(models.Source).get(source.id) is None + + # And the source's 
key was deleted + with pytest.raises(GpgKeyNotFoundError): + encryption_mgr.get_source_key_fingerprint(source.filesystem_id) + + # And the source's submissions, replies and messages were deleted + assert not app_db_session.query(models.Submission).filter_by(source_id=source.id).all() + assert not app_db_session.query(models.Reply).filter_by(source_id=source.id).all() + + # And the submissions' files were deleted + for file_path in all_submissions_file_paths: + assert not file_path.exists() + + # And the records to track whether the submissions have been seen were deleted + assert not app_db_session.query(models.SeenFile).filter_by(journalist_id=journo.id).all() + assert not app_db_session.query(models.SeenMessage).filter_by(journalist_id=journo.id).all() + assert not app_db_session.query(models.SeenReply).filter_by(journalist_id=journo.id).all() diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py index 0d81a0a730..bc8e901cec 100644 --- a/securedrop/tests/conftest.py +++ b/securedrop/tests/conftest.py @@ -21,13 +21,13 @@ from flask import Flask, url_for from hypothesis import settings from journalist_app import create_app as create_journalist_app -from passphrases import PassphraseGenerator from sdconfig import DEFAULT_SECUREDROP_ROOT, SecureDropConfig from source_app import create_app as create_source_app -from source_user import _SourceScryptManager, create_source_user +from source_user import _SourceScryptManager from store import Storage from tests import utils -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory +from tests.factories.models_factories import SourceFactory from tests.utils import i18n from two_factor import TOTP @@ -230,20 +230,11 @@ def test_admin(journalist_app: Flask) -> Dict[str, Any]: @pytest.fixture(scope="function") def test_source(journalist_app: Flask, app_storage: Storage) -> Dict[str, Any]: with journalist_app.app_context(): - passphrase = PassphraseGenerator.get_default().generate_passphrase() - source_user = create_source_user( - db_session=db.session, - source_passphrase=passphrase, - source_app_storage=app_storage, - ) - EncryptionManager.get_default().generate_source_key_pair(source_user) - source = source_user.get_db_record() + source = SourceFactory.create(db.session, app_storage) return { - "source_user": source_user, - # TODO(AD): Eventually the next keys could be removed as they are in source_user "source": source, - "codename": passphrase, - "filesystem_id": source_user.filesystem_id, + # TODO(AD): Eventually the next keys could be removed as they are in source + "filesystem_id": source.filesystem_id, "uuid": source.uuid, "id": source.id, } diff --git a/securedrop/tests/factories/__init__.py b/securedrop/tests/factories/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/securedrop/tests/factories.py b/securedrop/tests/factories/configs_factories.py similarity index 100% rename from securedrop/tests/factories.py rename to securedrop/tests/factories/configs_factories.py diff --git a/securedrop/tests/factories/models_factories.py b/securedrop/tests/factories/models_factories.py new file mode 100644 index 0000000000..0f5b5ca7d7 --- /dev/null +++ b/securedrop/tests/factories/models_factories.py @@ -0,0 +1,69 @@ +from datetime import datetime, timezone +from typing import List, Optional + +import models +from encryption import EncryptionManager +from passphrases import PassphraseGenerator +from source_user import create_source_user +from 
sqlalchemy.orm import Session +from store import Storage + + +class SourceFactory: + @staticmethod + def create( + db_session: Session, + app_storage: Storage, + *, + pending: bool = False, + is_starred: bool = False, + deleted_at: Optional[datetime] = None, + last_updated: Optional[datetime] = datetime.now(tz=timezone.utc), + ) -> models.Source: + passphrase = PassphraseGenerator.get_default().generate_passphrase() + source_user = create_source_user( + db_session=db_session, + source_passphrase=passphrase, + source_app_storage=app_storage, + ) + EncryptionManager.get_default().generate_source_key_pair(source_user) + + source = source_user.get_db_record() + source.last_updated = last_updated + source.pending = pending + + if is_starred: + star = models.SourceStar( + source=source, + starred=is_starred, + ) + db_session.add(star) + + if deleted_at: + source.deleted_at = deleted_at + + db_session.commit() + return source + + @classmethod + def create_batch( + cls, + db_session: Session, + app_storage: Storage, + records_count: int, + *, + pending: bool = False, + is_starred: bool = False, + deleted_at: Optional[datetime] = None, + ) -> List[models.Source]: + created_sources = [] + for _ in range(records_count): + source = cls.create( + db_session, + app_storage, + pending=pending, + deleted_at=deleted_at, + is_starred=is_starred, + ) + created_sources.append(source) + return created_sources diff --git a/securedrop/tests/functional/conftest.py b/securedrop/tests/functional/conftest.py index d42d208a52..ce7aa714c3 100644 --- a/securedrop/tests/functional/conftest.py +++ b/securedrop/tests/functional/conftest.py @@ -3,7 +3,6 @@ import time from contextlib import contextmanager from dataclasses import dataclass -from datetime import datetime, timezone from io import BytesIO from pathlib import Path from typing import Any, Callable, Generator, Optional, Tuple @@ -14,8 +13,7 @@ from models import Journalist from sdconfig import SecureDropConfig from selenium.webdriver.firefox.webdriver import WebDriver -from source_user import SourceUser -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.db_session import get_database_session from tests.functional.web_drivers import WebDriverTypeEnum, get_web_driver @@ -264,7 +262,7 @@ def sd_servers_with_submitted_file( yield sd_servers_result -def create_source_and_submission(config_in_use: SecureDropConfig) -> Tuple[SourceUser, Path]: +def create_source_and_submission(config_in_use: SecureDropConfig) -> Tuple[str, Path]: """Directly create a source and a submission within the app. 
Some tests for the journalist app require a submission to already be present, and this @@ -275,45 +273,34 @@ def create_source_and_submission(config_in_use: SecureDropConfig) -> Tuple[Sourc """ # This function will be called in a separate Process that runs the app # Hence the late imports - from encryption import EncryptionManager from models import Submission - from passphrases import PassphraseGenerator - from source_user import create_source_user from store import Storage, add_checksum_for_file + from tests.factories.models_factories import SourceFactory from tests.functional.db_session import get_database_session # Create a source - passphrase = PassphraseGenerator.get_default().generate_passphrase() with get_database_session(database_uri=config_in_use.DATABASE_URI) as db_session: - source_user = create_source_user( - db_session=db_session, - source_passphrase=passphrase, - source_app_storage=Storage.get_default(), - ) - source_db_record = source_user.get_db_record() - EncryptionManager.get_default().generate_source_key_pair(source_user) + source = SourceFactory.create(db_session, Storage.get_default(), pending=False) # Create a file submission from this source - source_db_record.interaction_count += 1 + source.interaction_count += 1 app_storage = Storage.get_default() encrypted_file_name = app_storage.save_file_submission( - filesystem_id=source_user.filesystem_id, - count=source_db_record.interaction_count, - journalist_filename=source_db_record.journalist_filename, + filesystem_id=source.filesystem_id, + count=source.interaction_count, + journalist_filename=source.journalist_filename, filename="filename.txt", stream=BytesIO(b"File with S3cr3t content"), ) - submission = Submission(source_db_record, encrypted_file_name, app_storage) + submission = Submission(source, encrypted_file_name, app_storage) db_session.add(submission) - source_db_record.pending = False - source_db_record.last_updated = datetime.now(timezone.utc) db_session.commit() - submission_file_path = app_storage.path(source_user.filesystem_id, submission.filename) + submission_file_path = app_storage.path(source.filesystem_id, submission.filename) add_checksum_for_file( session=db_session, db_obj=submission, file_path=submission_file_path, ) - return source_user, Path(submission_file_path) + return source.filesystem_id, Path(submission_file_path) diff --git a/securedrop/tests/functional/pageslayout/test_journalist_col.py b/securedrop/tests/functional/pageslayout/test_journalist_col.py index 23f0ad6559..919398c2d9 100644 --- a/securedrop/tests/functional/pageslayout/test_journalist_col.py +++ b/securedrop/tests/functional/pageslayout/test_journalist_col.py @@ -21,7 +21,7 @@ import pytest from sdconfig import SecureDropConfig -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.app_navigators.journalist_app_nav import JournalistAppNavigator from tests.functional.conftest import SdServersFixtureResult, spawn_sd_servers from tests.functional.pageslayout.utils import list_locales, save_static_data @@ -33,8 +33,8 @@ def _create_source_and_submission_and_delete_source_key(config_in_use: SecureDro from encryption import EncryptionManager from tests.functional.conftest import create_source_and_submission - source_user, _ = create_source_and_submission(config_in_use) - EncryptionManager.get_default().delete_source_key_pair(source_user.filesystem_id) + source_filesystem_id, _ = create_source_and_submission(config_in_use) + 
EncryptionManager.get_default().delete_source_key_pair(source_filesystem_id) @pytest.fixture(scope="function") diff --git a/securedrop/tests/functional/pageslayout/test_source_session_layout.py b/securedrop/tests/functional/pageslayout/test_source_session_layout.py index bad17f2a6a..1b8dbb7f84 100644 --- a/securedrop/tests/functional/pageslayout/test_source_session_layout.py +++ b/securedrop/tests/functional/pageslayout/test_source_session_layout.py @@ -3,7 +3,7 @@ from typing import Generator, Tuple import pytest -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.app_navigators.source_app_nav import SourceAppNavigator from tests.functional.conftest import SdServersFixtureResult, spawn_sd_servers from tests.functional.pageslayout.utils import list_locales, save_static_data diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py index 20ec10ed7f..fda75a5e82 100644 --- a/securedrop/tests/functional/test_admin_interface.py +++ b/securedrop/tests/functional/test_admin_interface.py @@ -11,7 +11,7 @@ from selenium.webdriver.common.keys import Keys from selenium.webdriver.firefox.webdriver import WebDriver from selenium.webdriver.support import expected_conditions -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.app_navigators.journalist_app_nav import JournalistAppNavigator from tests.functional.app_navigators.source_app_nav import SourceAppNavigator from tests.functional.conftest import SdServersFixtureResult, spawn_sd_servers diff --git a/securedrop/tests/functional/test_journalist.py b/securedrop/tests/functional/test_journalist.py index 5743b15d6e..bd34a9b412 100644 --- a/securedrop/tests/functional/test_journalist.py +++ b/securedrop/tests/functional/test_journalist.py @@ -23,7 +23,7 @@ from sdconfig import SecureDropConfig from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.keys import Keys -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.app_navigators.journalist_app_nav import JournalistAppNavigator from tests.functional.conftest import ( SdServersFixtureResult, diff --git a/securedrop/tests/functional/test_source_designation_collision.py b/securedrop/tests/functional/test_source_designation_collision.py index 5b6e1d4c89..62cb925f30 100644 --- a/securedrop/tests/functional/test_source_designation_collision.py +++ b/securedrop/tests/functional/test_source_designation_collision.py @@ -1,7 +1,7 @@ from pathlib import Path import pytest -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.app_navigators.source_app_nav import SourceAppNavigator from tests.functional.conftest import spawn_sd_servers diff --git a/securedrop/tests/test_encryption.py b/securedrop/tests/test_encryption.py index 6d7bc36b37..da97980083 100644 --- a/securedrop/tests/test_encryption.py +++ b/securedrop/tests/test_encryption.py @@ -62,18 +62,18 @@ def test_generate_source_key_pair( def test_get_source_public_key(self, test_source): # Given a source user with a key pair in the default encryption manager - source_user = test_source["source_user"] + source_filesystem_id = test_source["filesystem_id"] encryption_mgr = 
 
         # When using the encryption manager to fetch the source user's public key
         # It succeeds
-        source_pub_key = encryption_mgr.get_source_public_key(source_user.filesystem_id)
+        source_pub_key = encryption_mgr.get_source_public_key(source_filesystem_id)
         assert source_pub_key
         assert source_pub_key.startswith("-----BEGIN PGP PUBLIC KEY BLOCK----")
 
         # And the key's fingerprint was saved to Redis
         source_key_fingerprint = encryption_mgr._redis.hget(
-            encryption_mgr.REDIS_FINGERPRINT_HASH, source_user.filesystem_id
+            encryption_mgr.REDIS_FINGERPRINT_HASH, source_filesystem_id
         )
         assert source_key_fingerprint
 
@@ -107,22 +107,22 @@ def test_get_source_public_key_wrong_id(self, setup_journalist_key_and_gpg_folde
     def test_delete_source_key_pair(self, source_app, test_source):
         # Given a source user with a key pair in the default encryption manager
-        source_user = test_source["source_user"]
+        source_filesystem_id = test_source["filesystem_id"]
         encryption_mgr = EncryptionManager.get_default()
 
         # When using the encryption manager to delete this source user's key pair
         # It succeeds
-        encryption_mgr.delete_source_key_pair(source_user.filesystem_id)
+        encryption_mgr.delete_source_key_pair(source_filesystem_id)
 
         # And the user's key information can no longer be retrieved
         with pytest.raises(GpgKeyNotFoundError):
-            encryption_mgr.get_source_public_key(source_user.filesystem_id)
+            encryption_mgr.get_source_public_key(source_filesystem_id)
 
         with pytest.raises(GpgKeyNotFoundError):
-            encryption_mgr.get_source_key_fingerprint(source_user.filesystem_id)
+            encryption_mgr.get_source_key_fingerprint(source_filesystem_id)
 
         with pytest.raises(GpgKeyNotFoundError):
-            encryption_mgr._get_source_key_details(source_user.filesystem_id)
+            encryption_mgr._get_source_key_details(source_filesystem_id)
 
     def test_delete_source_key_pair_on_journalist_key(self, setup_journalist_key_and_gpg_folder):
         # Given an encryption manager
@@ -143,7 +143,7 @@ def test_delete_source_key_pair_pinentry_status_is_handled(
         Regression test for https://github.com/freedomofpress/securedrop/issues/4294
         """
         # Given a source user with a key pair in the default encryption manager
-        source_user = test_source["source_user"]
+        source_filesystem_id = test_source["filesystem_id"]
         encryption_mgr = EncryptionManager.get_default()
 
         # And a gpg binary that will trigger the issue described in #4294
@@ -154,11 +154,11 @@ def test_delete_source_key_pair_pinentry_status_is_handled(
 
         # When using the encryption manager to delete this source user's key pair
         # It succeeds
-        encryption_mgr.delete_source_key_pair(source_user.filesystem_id)
+        encryption_mgr.delete_source_key_pair(source_filesystem_id)
 
         # And the user's key information can no longer be retrieved
         with pytest.raises(GpgKeyNotFoundError):
-            encryption_mgr.get_source_key_fingerprint(source_user.filesystem_id)
+            encryption_mgr.get_source_key_fingerprint(source_filesystem_id)
 
         # And the bug fix was properly triggered
         captured = capsys.readouterr()
@@ -226,12 +226,16 @@ def test_encrypt_source_file(self, setup_journalist_key_and_gpg_folder, tmp_path
         # For GPG 2.1+, a non-null passphrase _must_ be passed to decrypt()
         assert not encryption_mgr._gpg.decrypt(encrypted_file, passphrase="test 123").ok
 
-    def test_encrypt_and_decrypt_journalist_reply(
-        self, source_app, test_source, tmp_path, app_storage
-    ):
+    def test_encrypt_and_decrypt_journalist_reply(self, source_app, tmp_path, app_storage):
         # Given a source user with a key pair in the default encryption manager
-        source_user1 = test_source["source_user"]
+        with source_app.app_context():
+            source_user1 = create_source_user(
+                db_session=db.session,
+                source_passphrase=PassphraseGenerator.get_default().generate_passphrase(),
+                source_app_storage=app_storage,
+            )
         encryption_mgr = EncryptionManager.get_default()
+        encryption_mgr.generate_source_key_pair(source_user1)
 
         # And another source with a key pair in the default encryption manager
         with source_app.app_context():
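The hunk above now creates its source user (and key pair) inline rather than taking it from the test_source fixture. Purely as an illustration of that same pattern, and not as part of this diff, the setup could also be wrapped in a small pytest fixture; the fixture name below is hypothetical and it reuses only calls that appear in the hunk above.

# Hypothetical fixture, illustrative only.
import pytest

from db import db
from encryption import EncryptionManager
from passphrases import PassphraseGenerator
from source_user import create_source_user


@pytest.fixture
def source_user_with_key(source_app, app_storage):
    # Create a source user and generate its reply key pair, as the updated
    # test now does inline.
    with source_app.app_context():
        source_user = create_source_user(
            db_session=db.session,
            source_passphrase=PassphraseGenerator.get_default().generate_passphrase(),
            source_app_storage=app_storage,
        )
        EncryptionManager.get_default().generate_source_key_pair(source_user)
        yield source_user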
test_source["source_user"] + with source_app.app_context(): + source_user1 = create_source_user( + db_session=db.session, + source_passphrase=PassphraseGenerator.get_default().generate_passphrase(), + source_app_storage=app_storage, + ) encryption_mgr = EncryptionManager.get_default() + encryption_mgr.generate_source_key_pair(source_user1) # And another source with a key pair in the default encryption manager with source_app.app_context(): diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py index 483233dde5..77ce595847 100644 --- a/securedrop/tests/test_i18n.py +++ b/securedrop/tests/test_i18n.py @@ -31,7 +31,7 @@ from flask_babel import gettext from i18n import parse_locale_set from sdconfig import DEFAULT_SECUREDROP_ROOT, FALLBACK_LOCALE, SecureDropConfig -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from werkzeug.datastructures import Headers NEVER_LOCALE = "eo" # Esperanto diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py index 5501a34292..6cf6dd38e2 100644 --- a/securedrop/tests/test_journalist.py +++ b/securedrop/tests/test_journalist.py @@ -41,7 +41,7 @@ from sqlalchemy.sql.expression import func from store import Storage from tests import utils -from tests.factories import SecureDropConfigFactory +from tests.factories.configs_factories import SecureDropConfigFactory from tests.functional.db_session import get_database_session from tests.utils import login_journalist from tests.utils.i18n import ( @@ -2793,7 +2793,7 @@ def test_delete_data_deletes_submissions_retaining_source( assert len(source.collection) == 4 - journalist_app_module.utils.delete_source_files(test_source["filesystem_id"]) + journalist_app_module.utils.delete_source_files(source) res = Source.query.filter_by(id=test_source["id"]).one_or_none() assert res is not None @@ -2801,119 +2801,6 @@ def test_delete_data_deletes_submissions_retaining_source( assert len(source.collection) == 0 -def test_delete_source_deletes_submissions(journalist_app, test_journo, test_source, app_storage): - """Verify that when a source is deleted, the submissions that - correspond to them are also deleted.""" - - with journalist_app.app_context(): - source = Source.query.get(test_source["id"]) - journo = Journalist.query.get(test_journo["id"]) - - utils.db_helper.submit(app_storage, source, 2) - utils.db_helper.reply(app_storage, journo, source, 2) - - journalist_app_module.utils.delete_collection(test_source["filesystem_id"]) - - res = Source.query.filter_by(id=test_source["id"]).one_or_none() - assert res is None - - -def test_delete_collection_updates_db(journalist_app, test_journo, test_source, app_storage): - """ - Verify that when a source is deleted, the Source record is deleted and all records associated - with the source are deleted. 
- """ - - with journalist_app.app_context(): - source = Source.query.get(test_source["id"]) - journo = Journalist.query.get(test_journo["id"]) - files = utils.db_helper.submit(app_storage, source, 2) - mark_seen(files, journo) - messages = utils.db_helper.submit(app_storage, source, 2) - mark_seen(messages, journo) - replies = utils.db_helper.reply(app_storage, journo, source, 2) - mark_seen(replies, journo) - - journalist_app_module.utils.delete_collection(test_source["filesystem_id"]) - - # Verify no source record exists - source_result = Source.query.filter_by(id=source.id).all() - assert not source_result - - # Verify no submission from the deleted source exist - submissions_result = Submission.query.filter_by(source_id=source.id).all() - assert not submissions_result - - # Verify no replies to the deleted source exist - replies_result = Reply.query.filter_by(source_id=source.id).all() - assert not replies_result - - # Verify no seen records about the deleted files, messages, or replies exist - for file in files: - seen_file = SeenFile.query.filter_by( - file_id=file.id, journalist_id=journo.id - ).one_or_none() - assert not seen_file - - for message in messages: - seen_message = SeenMessage.query.filter_by( - message_id=message.id, journalist_id=journo.id - ).one_or_none() - assert not seen_message - - for reply in replies: - seen_reply = SeenReply.query.filter_by( - reply_id=reply.id, journalist_id=journo.id - ).one_or_none() - assert not seen_reply - - -def test_delete_source_deletes_source_key(journalist_app, test_source, test_journo, app_storage): - """Verify that when a source is deleted, the PGP key that corresponds - to them is also deleted.""" - - with journalist_app.app_context(): - source = Source.query.get(test_source["id"]) - journo = Journalist.query.get(test_journo["id"]) - - utils.db_helper.submit(app_storage, source, 2) - utils.db_helper.reply(app_storage, journo, source, 2) - - # Source key exists - encryption_mgr = EncryptionManager.get_default() - assert encryption_mgr.get_source_key_fingerprint(test_source["filesystem_id"]) - - journalist_app_module.utils.delete_collection(test_source["filesystem_id"]) - - # Source key no longer exists - with pytest.raises(GpgKeyNotFoundError): - encryption_mgr.get_source_key_fingerprint(test_source["filesystem_id"]) - - -def test_delete_source_deletes_docs_on_disk( - journalist_app, test_source, test_journo, config, app_storage -): - """Verify that when a source is deleted, the encrypted documents that - exist on disk is also deleted.""" - - with journalist_app.app_context(): - source = Source.query.get(test_source["id"]) - journo = Journalist.query.get(test_journo["id"]) - - utils.db_helper.submit(app_storage, source, 2) - utils.db_helper.reply(app_storage, journo, source, 2) - - dir_source_docs = os.path.join(config.STORE_DIR, test_source["filesystem_id"]) - assert os.path.exists(dir_source_docs) - - journalist_app_module.utils.delete_collection(test_source["filesystem_id"]) - - def assertion(): - assert not os.path.exists(dir_source_docs) - - utils.asynchronous.wait_for_assertion(assertion) - - def test_bulk_delete_deletes_db_entries( journalist_app, test_source, test_journo, config, app_storage ): diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py index c11bff2b28..894d12f7a2 100644 --- a/securedrop/tests/test_manage.py +++ b/securedrop/tests/test_manage.py @@ -8,8 +8,7 @@ import manage from management import submissions from models import Journalist, db -from passphrases import PassphraseGenerator 
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
index c11bff2b28..894d12f7a2 100644
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -8,8 +8,7 @@
 import manage
 from management import submissions
 from models import Journalist, db
-from passphrases import PassphraseGenerator
-from source_user import create_source_user
+from tests.factories.models_factories import SourceFactory
 
 YUBIKEY_HOTP = [
     "cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc",
@@ -204,14 +203,11 @@ def test_were_there_submissions_today(source_app, config, app_storage):
         args = argparse.Namespace(data_root=data_root, verbose=logging.DEBUG)
         count_file = os.path.join(data_root, "submissions_today.txt")
 
-        source_user = create_source_user(
-            db_session=db.session,
-            source_passphrase=PassphraseGenerator.get_default().generate_passphrase(),
-            source_app_storage=app_storage,
+        source = SourceFactory.create(
+            db.session,
+            app_storage,
+            last_updated=datetime.datetime.utcnow() - datetime.timedelta(hours=24 * 2),
         )
-        source = source_user.get_db_record()
-        source.last_updated = datetime.datetime.utcnow() - datetime.timedelta(hours=24 * 2)
-        db.session.commit()
         submissions.were_there_submissions_today(args, context)
         assert open(count_file).read() == "0"
         source.last_updated = datetime.datetime.utcnow()
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
index f09e7c44e9..0f4a32f93d 100644
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -13,22 +13,18 @@
 
 import pytest
 import version
+from actions.sources_actions import DeleteSingleSourceAction
 from db import db
 from encryption import EncryptionManager
-from flaky import flaky
 from flask import escape, g, request, session, url_for
-from flask_babel import gettext
-from journalist_app.utils import delete_collection
 from models import InstanceConfig, Reply, Source
 from passphrases import PassphraseGenerator
 from source_app import api as source_app_api
-from source_app import get_logo_url, session_manager
+from source_app import get_logo_url
 from source_app.session_manager import SessionManager
-
-from . import utils
-from .utils.db_helper import new_codename, submit
-from .utils.i18n import get_test_locales, language_tag, page_language, xfail_untranslated_messages
-from .utils.instrument import InstrumentedApp
+from tests import utils
+from tests.utils.db_helper import new_codename, submit
+from tests.utils.instrument import InstrumentedApp
 
 GENERATE_DATA = {"tor2web_check": 'href="fake.onion"'}
 
@@ -829,7 +825,9 @@ def test_source_is_deleted_while_logged_in(source_app):
 
         # Now that the source is logged in, the journalist deletes the source
         source_user = SessionManager.get_logged_in_user(db_session=db.session)
-        delete_collection(source_user.filesystem_id)
+        DeleteSingleSourceAction(
+            db_session=db.session, source=source_user.get_db_record()
+        ).perform()
 
         # Source attempts to continue to navigate
         resp = app.get(url_for("main.lookup"), follow_redirects=True)
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
index 156b36990b..50de4b81fb 100644
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -13,11 +13,10 @@
 from db import db
 from journalist_app import create_app
 from models import Reply, Submission
-from passphrases import PassphraseGenerator
 from rq.job import Job
-from source_user import create_source_user
 from store import Storage, async_add_checksum_for_file, queued_add_checksum_for_file
 from tests import utils
+from tests.factories.models_factories import SourceFactory
 
 
 @pytest.fixture(scope="function")
@@ -227,12 +226,7 @@ def test_add_checksum_for_file(config, app_storage, db_model):
 
     with app.app_context():
         db.create_all()
-        source_user = create_source_user(
-            db_session=db.session,
-            source_passphrase=PassphraseGenerator.get_default().generate_passphrase(),
-            source_app_storage=test_storage,
-        )
-        source = source_user.get_db_record()
+        source = SourceFactory.create(db.session, app_storage)
         target_file_path = test_storage.path(source.filesystem_id, "1-foo-msg.gpg")
         test_message = b"hash me!"
         expected_hash = "f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927"
@@ -296,12 +290,7 @@ def test_async_add_checksum_for_file(config, app_storage, db_model):
 
     with app.app_context():
         db.create_all()
-        source_user = create_source_user(
-            db_session=db.session,
-            source_passphrase=PassphraseGenerator.get_default().generate_passphrase(),
-            source_app_storage=app_storage,
-        )
-        source = source_user.get_db_record()
+        source = SourceFactory.create(db.session, app_storage)
         target_file_path = app_storage.path(source.filesystem_id, "1-foo-msg.gpg")
         test_message = b"hash me!"
         expected_hash = "f1df4a6d8659471333f7f6470d593e0911b4d487856d88c83d2d187afa195927"
diff --git a/securedrop/tests/test_submission_cleanup.py b/securedrop/tests/test_submission_cleanup.py
index 848314f026..17e1448d1b 100644
--- a/securedrop/tests/test_submission_cleanup.py
+++ b/securedrop/tests/test_submission_cleanup.py
@@ -5,6 +5,7 @@
 from management import submissions
 from models import Submission
 from tests import utils
+from tests.factories.models_factories import SourceFactory
 
 
 def test_delete_disconnected_db_submissions(journalist_app, app_storage, config):
@@ -12,7 +13,7 @@ def test_delete_disconnected_db_submissions(journalist_app, app_storage, config)
     Test that Submission records without corresponding files are deleted.
""" with journalist_app.app_context(): - source, _ = utils.db_helper.init_source(app_storage) + source = SourceFactory.create(db.session, app_storage) source_id = source.id # make two submissions @@ -42,7 +43,8 @@ def test_delete_disconnected_fs_submissions(journalist_app, app_storage, config) """ Test that files in the store without corresponding Submission records are deleted. """ - source, _ = utils.db_helper.init_source(app_storage) + with journalist_app.app_context(): + source = SourceFactory.create(db.session, app_storage) # make two submissions utils.db_helper.submit(app_storage, source, 2) diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py index 55998f037d..90ff99e197 100644 --- a/securedrop/tests/utils/db_helper.py +++ b/securedrop/tests/utils/db_helper.py @@ -89,6 +89,7 @@ def reply(storage, journalist, source, num_replies): return replies +# TODO(AD): Delete this and replace with calls to SourceFactory.create() def init_source(storage): """Initialize a source: create their database record, the filesystem directory that stores their submissions & replies,