Simplify the SYNC mode and remove the dependency on fakeredis #176
Signed-off-by: Thomas Druez <tdruez@nexb.com>
tdruez committed Sep 30, 2021
1 parent: 8c7985a · commit: 4dc8d73
Showing 4 changed files with 15 additions and 26 deletions.
Makefile (2 changes: 1 addition & 1 deletion)

@@ -108,7 +108,7 @@ sqlite:
 	@$(MAKE) migrate

 run:
-	${MANAGE} runserver 8001
+	${MANAGE} runserver 8001 --noreload

 test:
 	@echo "-> Run the test suite"
scanpipe/apps.py (18 changes: 0 additions & 18 deletions)

@@ -58,24 +58,6 @@ def ready(self):
         self.load_pipelines()
         self.set_policies()

-        if not settings.SCANCODEIO_ASYNC:
-            self.set_eager_mode()
-
-    @staticmethod
-    def set_eager_mode():
-        """
-        Removes the need for a running Redis server, required by RQ to store the
-        results, when the `SCANCODEIO_ASYNC` setting is set to False.
-        """
-        import django_rq.queues
-        from fakeredis import FakeRedis
-        from fakeredis import FakeStrictRedis
-
-        def patched_redis_connection(config, use_strict_redis=False):
-            return FakeStrictRedis() if use_strict_redis else FakeRedis()
-
-        django_rq.queues.get_redis_connection = patched_redis_connection
-
     def flag_stale_runs(self):
         """
         Flags the "staled" Runs.
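
For context, the block removed above monkey-patched django_rq's connection factory so that RQ received an in-memory fakeredis client instead of opening a real connection, which is what made a Redis-less SYNC mode work before this commit. A minimal standalone sketch of that pattern follows (an illustration, assuming django_rq and fakeredis are installed and Django settings with an RQ_QUEUES entry are configured); the commit makes it unnecessary because the synchronous path no longer goes through RQ at all.

# Sketch of the pattern this commit removes: swap django_rq's Redis connection
# factory for an in-memory fakeredis client so RQ queues can be built without
# a running Redis server.
import django_rq.queues
from fakeredis import FakeRedis, FakeStrictRedis


def patched_redis_connection(config, use_strict_redis=False):
    # django_rq normally builds a real Redis client from `config`;
    # return a process-local fake instead.
    return FakeStrictRedis() if use_strict_redis else FakeRedis()


django_rq.queues.get_redis_connection = patched_redis_connection

# Any queue obtained afterwards is backed by the fake, in-memory store.
queue = django_rq.get_queue("default")

With that patch gone, `settings.SCANCODEIO_ASYNC` is now checked directly at the points that would otherwise interact with RQ (see the scanpipe/models.py changes below), so neither a real nor a fake Redis backend is needed in SYNC mode.
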
scanpipe/models.py (19 changes: 14 additions & 5 deletions)

@@ -270,6 +270,10 @@ def stop_task(self):
         """
         Stops a "running" task.
         """
+        if not settings.SCANCODEIO_ASYNC:
+            self.set_task_ended(exitcode=88)
+            return
+
         job_status = self.job_status

         if not job_status:
@@ -291,9 +295,10 @@ def delete_task(self):
         """
         Deletes a "not started" or "queued" task.
         """
-        if self.task_id:
+        if settings.SCANCODEIO_ASYNC and self.task_id:
             # Cancels the job and deletes the job hash from Redis.
             self.job.delete()
+
         self.delete()


@@ -966,6 +971,10 @@ def execute_task_async(self):
         """
         run_pk = str(self.pk)

+        # Bypass entirely the queue system and run the pipeline in the current thread.
+        if not settings.SCANCODEIO_ASYNC:
+            tasks.execute_pipeline_task(run_pk)
+
         job = django_rq.enqueue(
             tasks.execute_pipeline_task,
             job_id=run_pk,
@@ -978,11 +987,11 @@
         # properly "enqueued".
         # In case the `django_rq.enqueue()` raise an exception (Redis server error),
         # we want to keep the Run status as "not started" rather than "queued".
-        # Note that the Run is also set as "queued" at the start of
+        # Note that the Run will then be set as "running" at the start of
         # `execute_pipeline_task()` by calling the `set_task_started()`.
-        # There's no need to call the following in synchronous single thread mode.
-        if settings.SCANCODEIO_ASYNC:
-            self.set_task_queued()
+        # There's no need to call the following in synchronous single thread mode as
+        # the run will be directly set as "running".
+        self.set_task_queued()

         return job

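Taken together, the scanpipe/models.py changes make run execution fork on `settings.SCANCODEIO_ASYNC` instead of relying on a patched Redis backend. Below is a condensed, paraphrased sketch of the resulting `execute_task_async()` flow, not the literal project code: the early `return` in the synchronous branch and the `args=[run_pk]` keyword are assumptions added for readability, and the remaining `django_rq.enqueue()` arguments hidden by the collapsed diff are left out.

# Paraphrased sketch of Run.execute_task_async() after this commit; assumes the
# module-level imports from scanpipe/models.py (django.conf.settings, django_rq,
# scanpipe.tasks). The early `return` and `args=[run_pk]` are illustrative
# assumptions, not lines taken from this diff.
def execute_task_async(self):
    run_pk = str(self.pk)

    if not settings.SCANCODEIO_ASYNC:
        # SYNC mode: no Redis and no worker; execute_pipeline_task() runs in
        # the current thread and moves the Run straight to "running".
        tasks.execute_pipeline_task(run_pk)
        return

    # ASYNC mode: enqueue the pipeline and let an RQ worker pick it up.
    job = django_rq.enqueue(
        tasks.execute_pipeline_task,
        job_id=run_pk,
        args=[run_pk],
    )
    self.set_task_queued()
    return job

The `stop_task()` and `delete_task()` changes follow the same idea: with SYNC mode there is no RQ job to signal or clean up, so stopping a run records exit code 88 and deleting it skips the Redis job hash removal. The hypothetical test below, which is not part of this commit, illustrates the expected behavior; `Project.add_pipeline()`, the `task_exitcode` field, and the `runs` related manager are assumed from the surrounding ScanCode.io code base.

# Hypothetical test (not from this commit) exercising the SYNC-mode paths.
from django.test import TestCase, override_settings

from scanpipe.models import Project


@override_settings(SCANCODEIO_ASYNC=False)
class SyncModeTaskTest(TestCase):
    def test_stop_and_delete_without_redis(self):
        project = Project.objects.create(name="demo")
        run = project.add_pipeline("scan_codebase")  # assumed helper

        run.stop_task()  # no RQ job to stop; the run is simply marked as ended
        run.refresh_from_db()
        self.assertEqual(88, run.task_exitcode)

        run.delete_task()  # skips the Redis job hash cleanup, then deletes the run
        self.assertFalse(project.runs.exists())
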
setup.cfg (2 changes: 0 additions & 2 deletions)

@@ -94,8 +94,6 @@ dev =
     sphinxcontrib-django2==1.1.1
     # Release
     bumpver==2021.1113
-    # Fake worker
-    fakeredis==1.6.1

 [options.entry_points]
 console_scripts =