Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Defer source key import #1035

Merged
merged 2 commits into from
Apr 1, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions securedrop_client/api_jobs/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from sqlalchemy.orm.session import Session

from securedrop_client.api_jobs.base import ApiJob
from securedrop_client.crypto import GpgHelper
from securedrop_client.storage import get_remote_data, update_local_storage


Expand All @@ -19,10 +18,9 @@ class MetadataSyncJob(ApiJob):

NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL = 2

def __init__(self, data_dir: str, gpg: GpgHelper) -> None:
def __init__(self, data_dir: str) -> None:
    """
    Create a metadata sync job.

    Args:
        data_dir: directory under which synced source data is stored
            (kept on the instance for use by call_api).
    """
    # Retry budget comes from the class constant so subclasses can tune it.
    super().__init__(remaining_attempts=self.NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL)
    self.data_dir = data_dir
self.gpg = gpg

def call_api(self, api_client: API, session: Session) -> Any:
'''
Expand All @@ -40,7 +38,6 @@ def call_api(self, api_client: API, session: Session) -> Any:
remote_sources, remote_submissions, remote_replies = get_remote_data(api_client)

update_local_storage(session,
self.gpg,
remote_sources,
remote_submissions,
remote_replies,
Expand Down
39 changes: 19 additions & 20 deletions securedrop_client/crypto.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import tempfile

from sqlalchemy.orm import scoped_session
from uuid import UUID

from securedrop_client.config import Config
from securedrop_client.db import Source
Expand Down Expand Up @@ -145,20 +144,17 @@ def _gpg_cmd_base(self) -> list:
cmd.extend(['--trust-model', 'always'])
return cmd

def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None:
session = self.session_maker()
local_source = session.query(Source).filter_by(uuid=source_uuid).one()

logger.debug("Importing key with fingerprint %s", fingerprint)
self._import(key_data)

local_source.fingerprint = fingerprint
local_source.public_key = key_data
session.add(local_source)
session.commit()
def import_key(self, source: Source) -> None:
    """
    Import the given source's GPG public key into the client keyring.

    Raises:
        CryptoError: if the source record carries no public key.
    """
    logger.debug("Importing key for source %s", source.uuid)
    if source.public_key:
        self._import(source.public_key)
    else:
        raise CryptoError(f"Could not import key: source {source.uuid} has no key")

def _import(self, key_data: str) -> None:
'''Wrapper for `gpg --import-keys`'''
"""Imports a key to the client GnuPG keyring."""

with tempfile.NamedTemporaryFile('w+') as temp_key, \
tempfile.NamedTemporaryFile('w+') as stdout, \
Expand Down Expand Up @@ -187,16 +183,19 @@ def encrypt_to_source(self, source_uuid: str, data: str) -> str:
session = self.session_maker()
source = session.query(Source).filter_by(uuid=source_uuid).one()

# do not attempt to encrypt if the source key is missing
if source.fingerprint is None:
raise CryptoError(
'Could not encrypt reply due to missing fingerprint for source: {}'.format(
source_uuid))

# do not attempt to encrypt if the journalist key is missing
if self.journalist_key_fingerprint is None:
if not self.journalist_key_fingerprint:
raise CryptoError('Could not encrypt reply due to missing fingerprint for journalist')

# do not attempt to encrypt if the source key is missing
if not (source.fingerprint and source.public_key):
raise CryptoError(f'Could not encrypt reply: no key for source {source_uuid}')

try:
self.import_key(source)
except CryptoError as e:
raise CryptoError(f"Could not import key before encrypting reply: {e}") from e

cmd = self._gpg_cmd_base()

with tempfile.NamedTemporaryFile('w+') as content, \
Expand Down
62 changes: 16 additions & 46 deletions securedrop_client/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import Session

from securedrop_client.crypto import CryptoError, GpgHelper
from securedrop_client.db import (DraftReply, Source, Message, File, Reply, ReplySendStatus,
ReplySendStatusCodes, User)
from securedrop_client.utils import chronometer
Expand Down Expand Up @@ -105,7 +104,6 @@ def get_remote_data(api: API) -> Tuple[List[SDKSource], List[SDKSubmission], Lis


def update_local_storage(session: Session,
gpg: GpgHelper,
remote_sources: List[SDKSource],
remote_submissions: List[SDKSubmission],
remote_replies: List[SDKReply],
Expand All @@ -123,7 +121,7 @@ def update_local_storage(session: Session,
# Because of that, each get_local_* function needs to be called just before
# its respective update_* function.
with chronometer(logger, "update_sources"):
update_sources(gpg, remote_sources, get_local_sources(session), session, data_dir)
update_sources(remote_sources, get_local_sources(session), session, data_dir)

with chronometer(logger, "update_files"):
update_files(remote_files, get_local_files(session), session, data_dir)
Expand All @@ -135,38 +133,8 @@ def update_local_storage(session: Session,
update_replies(remote_replies, get_local_replies(session), session, data_dir)


def update_source_key(gpg: GpgHelper, local_source: Source, remote_source: SDKSource) -> None:
"""
Updates a source's GPG key.
"""
if not remote_source.key.get("fingerprint"):
logger.error("New source data lacks key fingerprint")
return

if not remote_source.key.get("public"):
logger.error("New source data lacks public key")
return

if (
local_source.fingerprint == remote_source.key['fingerprint'] and
local_source.public_key == remote_source.key['public']
):
logger.debug("Source key data is unchanged")
return

try:
# import_key updates the source's key and fingerprint, and commits
gpg.import_key(
remote_source.uuid,
remote_source.key['public'],
remote_source.key['fingerprint']
)
except CryptoError:
logger.error('Failed to update key information for source %s', remote_source.uuid)


def update_sources(gpg: GpgHelper, remote_sources: List[SDKSource],
local_sources: List[Source], session: Session, data_dir: str) -> None:
def update_sources(remote_sources: List[SDKSource], local_sources:
List[Source], session: Session, data_dir: str) -> None:
"""
Given collections of remote sources, the current local sources and a
session to the local database, ensure the state of the local database
Expand All @@ -188,29 +156,31 @@ def update_sources(gpg: GpgHelper, remote_sources: List[SDKSource],
local_source.document_count = source.number_of_documents
local_source.is_starred = source.is_starred
local_source.last_updated = parse(source.last_updated)
local_source.public_key = source.key['public']
local_source.fingerprint = source.key['fingerprint']
session.commit()

update_source_key(gpg, local_source, source)

# Removing the UUID from local_sources_by_uuid ensures
# this record won't be deleted at the end of this
# function.
del local_sources_by_uuid[source.uuid]
logger.debug('Updated source {}'.format(source.uuid))
else:
# A new source to be added to the database.
ns = Source(uuid=source.uuid,
journalist_designation=source.journalist_designation,
is_flagged=source.is_flagged,
interaction_count=source.interaction_count,
is_starred=source.is_starred,
last_updated=parse(source.last_updated),
document_count=source.number_of_documents)
ns = Source(
uuid=source.uuid,
journalist_designation=source.journalist_designation,
is_flagged=source.is_flagged,
interaction_count=source.interaction_count,
is_starred=source.is_starred,
last_updated=parse(source.last_updated),
document_count=source.number_of_documents,
public_key=source.key['public'],
fingerprint=source.key['fingerprint'],
)
session.add(ns)
session.commit()

update_source_key(gpg, ns, source)

logger.debug('Added new source {}'.format(source.uuid))

# The uuids remaining in local_uuids do not exist on the remote server, so
Expand Down
2 changes: 1 addition & 1 deletion securedrop_client/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def __init__(
self.on_sync_success = on_sync_success
self.on_sync_failure = on_sync_failure

self.job = MetadataSyncJob(self.data_dir, self.gpg)
self.job = MetadataSyncJob(self.data_dir)
self.job.success_signal.connect(self.on_sync_success, type=Qt.QueuedConnection)
self.job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection)

Expand Down
85 changes: 2 additions & 83 deletions tests/api_jobs/test_sync.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import os

from securedrop_client.api_jobs.sync import MetadataSyncJob
from securedrop_client.crypto import GpgHelper, CryptoError

from tests import factory

Expand All @@ -11,8 +10,7 @@


def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
gpg = GpgHelper(homedir, session_maker, is_qubes=False)
job = MetadataSyncJob(homedir, gpg)
job = MetadataSyncJob(homedir)

mock_source = factory.RemoteSource(
key={
Expand All @@ -22,7 +20,6 @@ def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
}
)

mock_key_import = mocker.patch.object(job.gpg, 'import_key')
mock_get_remote_data = mocker.patch(
'securedrop_client.api_jobs.sync.get_remote_data',
return_value=([mock_source], [], []))
Expand All @@ -33,50 +30,14 @@ def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):

job.call_api(api_client, session)

assert mock_key_import.call_args[0][0] == mock_source.uuid
assert mock_key_import.call_args[0][1] == mock_source.key['public']
assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint']
assert mock_get_remote_data.call_count == 1


def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session, session_maker):
"""
Check that we can gracefully handle a key import failure.
"""
gpg = GpgHelper(homedir, session_maker, is_qubes=False)
job = MetadataSyncJob(homedir, gpg)

mock_source = factory.RemoteSource(
key={
'type': 'PGP',
'public': PUB_KEY,
'fingerprint': '123456ABC',
}
)

mock_key_import = mocker.patch.object(job.gpg, 'import_key',
side_effect=CryptoError)
mock_get_remote_data = mocker.patch(
'securedrop_client.api_jobs.sync.get_remote_data',
return_value=([mock_source], [], []))

api_client = mocker.MagicMock()
api_client.default_request_timeout = mocker.MagicMock()

job.call_api(api_client, session)

assert mock_key_import.call_args[0][0] == mock_source.uuid
assert mock_key_import.call_args[0][1] == mock_source.key['public']
assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint']
assert mock_get_remote_data.call_count == 1


def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, session_maker):
"""
Check that we can gracefully handle missing source keys.
"""
gpg = GpgHelper(homedir, session_maker, is_qubes=False)
job = MetadataSyncJob(homedir, gpg)
job = MetadataSyncJob(homedir)

mock_source = factory.RemoteSource(
key={
Expand All @@ -86,55 +47,13 @@ def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, sess
}
)

mock_key_import = mocker.patch.object(job.gpg, 'import_key')
mock_get_remote_data = mocker.patch(
'securedrop_client.api_jobs.sync.get_remote_data',
return_value=([mock_source], [], []))

api_client = mocker.MagicMock()
api_client.default_request_timeout = mocker.MagicMock()

job.call_api(api_client, session)

assert mock_key_import.call_count == 0
assert mock_get_remote_data.call_count == 1


def test_MetadataSyncJob_only_import_new_source_keys(mocker, homedir, session, session_maker):
"""
Verify that we only import source keys we don't already have.
"""
gpg = GpgHelper(homedir, session_maker, is_qubes=False)
job = MetadataSyncJob(homedir, gpg)

mock_source = factory.RemoteSource(
key={
'type': 'PGP',
'public': PUB_KEY,
'fingerprint': '123456ABC',
}
)

mock_get_remote_data = mocker.patch(
'securedrop_client.api_jobs.sync.get_remote_data',
return_value=([mock_source], [], []))

api_client = mocker.MagicMock()
api_client.default_request_timeout = mocker.MagicMock()

crypto_logger = mocker.patch('securedrop_client.crypto.logger')
storage_logger = mocker.patch('securedrop_client.storage.logger')

job.call_api(api_client, session)

assert mock_get_remote_data.call_count == 1

log_msg = crypto_logger.debug.call_args_list[0][0]
assert log_msg == ('Importing key with fingerprint %s', mock_source.key['fingerprint'])

job.call_api(api_client, session)

assert mock_get_remote_data.call_count == 2

log_msg = storage_logger.debug.call_args_list[5][0][0]
assert log_msg == 'Source key data is unchanged'
8 changes: 6 additions & 2 deletions tests/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,18 @@ def User(**attrs):


def Source(**attrs):

with open(os.path.join(os.path.dirname(__file__), 'files', 'test-key.gpg.pub.asc')) as f:
pub_key = f.read()

global SOURCE_COUNT
SOURCE_COUNT += 1
defaults = dict(
uuid='source-uuid-{}'.format(SOURCE_COUNT),
journalist_designation='testy-mctestface',
is_flagged=False,
public_key='mah pub key',
fingerprint='mah fingerprint',
public_key=pub_key,
fingerprint='B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F',
interaction_count=0,
is_starred=False,
last_updated=datetime.now(),
Expand Down
Loading