remove sync_api call and metadata sync queue
Allie Crevier committed Jan 29, 2020
Commit 5323149 (parent b8b82f9)
Showing 2 changed files with 3 additions and 50 deletions.
securedrop_client/logic.py: 0 additions, 21 deletions
@@ -33,7 +33,6 @@
 from securedrop_client import storage
 from securedrop_client import db
 from securedrop_client.sync import ApiSync
-from securedrop_client.api_jobs.sync import MetadataSyncJob
 from securedrop_client.api_jobs.downloads import FileDownloadJob, MessageDownloadJob, \
     ReplyDownloadJob, DownloadChecksumMismatchException
 from securedrop_client.api_jobs.sources import DeleteSourceJob
Expand Down Expand Up @@ -373,26 +372,6 @@ def authenticated(self):
"""
return bool(self.api and self.api.token is not None)

def sync_api(self):
"""
Grab data from the remote SecureDrop API in a non-blocking manner.
TODO: This should be removed once sync_api calls have been removed from all the different
job handlers.
"""
logger.debug("In sync_api on thread {}".format(self.thread().currentThreadId()))
if self.authenticated():
logger.debug("You are authenticated, going to make your call")

job = MetadataSyncJob(self.data_dir, self.gpg)
job.success_signal.connect(self.on_sync_success, type=Qt.QueuedConnection)
job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection)

self.api_job_queue.enqueue(job)

logger.debug("In sync_api, after call to submit job to queue, on "
"thread {}".format(self.thread().currentThreadId()))

def last_sync(self):
"""
Returns the time of last synchronisation with the remote SD server.
Expand Down
securedrop_client/queue.py: 3 additions, 29 deletions
@@ -9,7 +9,6 @@
 
 from securedrop_client.api_jobs.base import ApiJob, ApiInaccessibleError, DEFAULT_NUM_ATTEMPTS, \
     PauseQueueJob
-from securedrop_client.api_jobs.sync import MetadataSyncJob
 from securedrop_client.api_jobs.downloads import (FileDownloadJob, MessageDownloadJob,
                                                   ReplyDownloadJob)
 from securedrop_client.api_jobs.sources import DeleteSourceJob
@@ -47,7 +46,6 @@ class RunnableQueue(QObject):
         DeleteSourceJob: 14,
         SendReplyJob: 15,
         UpdateStarJob: 16,
-        MetadataSyncJob: 17,
         MessageDownloadJob: 18,
         ReplyDownloadJob: 18,
     }
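How these numbers take effect: RunnableQueue.add_job puts (priority, job) tuples on a priority queue, so lower numbers are dequeued first, and ties between equal-priority jobs are presumably broken by the order_number assigned at submission time. A standalone sketch of that ordering with the standard-library PriorityQueue (illustrative only, not the project's implementation; the counter stands in for order_number):

# Standalone illustration of priority-tuple ordering in a queue.PriorityQueue:
# lower numbers come out first, and a monotonically increasing counter
# (standing in for ApiJob.order_number) keeps equal-priority jobs in
# submission order.
import itertools
from queue import PriorityQueue

order_number = itertools.count()
jobs = PriorityQueue()

for priority, name in [(18, "MessageDownloadJob"),
                       (15, "SendReplyJob"),
                       (18, "ReplyDownloadJob"),
                       (14, "DeleteSourceJob")]:
    jobs.put_nowait((priority, next(order_number), name))

while not jobs.empty():
    print(jobs.get_nowait())
# (14, 3, 'DeleteSourceJob')
# (15, 1, 'SendReplyJob')
# (18, 0, 'MessageDownloadJob')
# (18, 2, 'ReplyDownloadJob')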
@@ -85,12 +83,7 @@ def add_job(self, job: ApiJob) -> None:
         current_order_number = next(self.order_number)
         job.order_number = current_order_number
         priority = self.JOB_PRIORITIES[type(job)]
-        try:
-            self.queue.put_nowait((priority, job))
-        except Full:
-            # Pass silently if the queue is full. For use with MetadataSyncJob.
-            # See #652.
-            pass
+        self.queue.put_nowait((priority, job))
 
     def re_add_job(self, job: ApiJob) -> None:
         '''
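The dropped try/except Full existed only for the old metadata queue, which was created with size=1 so that a duplicate MetadataSyncJob would be discarded silently (the #652 reference in the removed comment). The remaining main and download queues are unbounded, so put_nowait cannot raise queue.Full there. A standalone sketch of the difference (illustrative only, not project code):

# Illustrative only: why the removed guard mattered for a size-1 queue and why
# it is unnecessary for the unbounded queues that remain.
from queue import PriorityQueue, Full

bounded = PriorityQueue(maxsize=1)         # like the old metadata queue (size=1)
bounded.put_nowait((17, 0, "sync"))
try:
    bounded.put_nowait((17, 1, "sync"))    # a second sync while one is pending
except Full:
    print("queue full: duplicate sync dropped silently")

unbounded = PriorityQueue()                # maxsize=0 means no size limit
for n in range(10_000):
    unbounded.put_nowait((15, n, "job"))   # never raises Full
print(unbounded.qsize())                   # 10000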
@@ -148,34 +141,27 @@ def __init__(self, api_client: API, session_maker: scoped_session) -> None:
 
         self.main_thread = QThread()
         self.download_file_thread = QThread()
-        self.metadata_thread = QThread()
 
         self.main_queue = RunnableQueue(api_client, session_maker)
         self.download_file_queue = RunnableQueue(api_client, session_maker)
-        self.metadata_queue = RunnableQueue(api_client, session_maker, size=1)
 
         self.main_queue.moveToThread(self.main_thread)
         self.download_file_queue.moveToThread(self.download_file_thread)
-        self.metadata_queue.moveToThread(self.metadata_thread)
 
         self.main_thread.started.connect(self.main_queue.process)
         self.download_file_thread.started.connect(self.download_file_queue.process)
-        self.metadata_thread.started.connect(self.metadata_queue.process)
 
         self.main_queue.paused.connect(self.on_queue_paused)
         self.download_file_queue.paused.connect(self.on_queue_paused)
-        self.metadata_queue.paused.connect(self.on_queue_paused)
 
     def logout(self) -> None:
         self.main_queue.api_client = None
         self.download_file_queue.api_client = None
-        self.metadata_queue.api_client = None
 
     def login(self, api_client: API) -> None:
         logger.debug('Passing API token to queues')
         self.main_queue.api_client = api_client
         self.download_file_queue.api_client = api_client
-        self.metadata_queue.api_client = api_client
         self.start_queues()
 
     def start_queues(self) -> None:
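The surviving lines above are the standard PyQt worker pattern that ApiJobQueue keeps using for its two remaining queues: each RunnableQueue is moved onto its own QThread, the thread's started signal kicks off the queue's processing loop, and results travel back across threads via queued signal connections; the removed sync_api() in logic.py relied on the same queued-connection style when wiring MetadataSyncJob's success and failure signals. A minimal, self-contained PyQt5 sketch of the pattern (hypothetical Worker/Listener names, not securedrop_client code):

# Minimal sketch of the moveToThread pattern shown above (hypothetical names).
import sys
from PyQt5.QtCore import QCoreApplication, QObject, QThread, Qt, pyqtSignal

class Worker(QObject):
    done = pyqtSignal(str)

    def process(self) -> None:
        # Runs on the worker thread because the Worker was moved there.
        self.done.emit("processed on {}".format(QThread.currentThread()))

class Listener(QObject):
    def on_done(self, msg: str) -> None:
        # Queued connection: this slot runs back on the receiver's (main) thread.
        print(msg, "-> handled on", QThread.currentThread())
        QCoreApplication.quit()

app = QCoreApplication(sys.argv)
thread = QThread()
worker = Worker()
worker.moveToThread(thread)             # give the worker the thread's affinity
listener = Listener()
worker.done.connect(listener.on_done, type=Qt.QueuedConnection)
thread.started.connect(worker.process)  # start work when the thread starts
thread.start()
app.exec_()
thread.quit()
thread.wait()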
@@ -187,25 +173,16 @@ def start_queues(self) -> None:
             logger.debug('Starting download thread')
             self.download_file_thread.start()
 
-        if not self.metadata_thread.isRunning():
-            logger.debug("Starting metadata thread")
-            self.metadata_thread.start()
-
     def on_queue_paused(self) -> None:
         self.paused.emit()
 
     def resume_queues(self) -> None:
         logger.info("Resuming queues")
-        main_paused = not self.main_thread.isRunning()
-        download_paused = not self.download_file_thread.isRunning()
-        metadata_paused = not self.metadata_thread.isRunning()
         self.start_queues()
-        if main_paused:
+        if not self.main_thread.isRunning():
             self.main_queue.resume.emit()
-        if download_paused:
+        if not self.download_file_thread.isRunning():
             self.download_file_queue.resume.emit()
-        if metadata_paused:
-            self.metadata_queue.resume.emit()
 
     def enqueue(self, job: ApiJob) -> None:
         # Prevent api jobs being added to the queue when not logged in.
@@ -219,9 +196,6 @@ def enqueue(self, job: ApiJob) -> None:
         if isinstance(job, FileDownloadJob):
             logger.debug('Adding job to download queue')
             self.download_file_queue.add_job(job)
-        elif isinstance(job, MetadataSyncJob):
-            logger.debug("Adding job to metadata queue")
-            self.metadata_queue.add_job(job)
         else:
             logger.debug('Adding job to main queue')
             self.main_queue.add_job(job)
