Commit bb340de

changes

AnkurPrabhu committed Oct 29, 2023
1 parent 62d85ce commit bb340de
Showing 3 changed files with 107 additions and 74 deletions.
75 changes: 46 additions & 29 deletions api/all_tasks.py
@@ -1,16 +1,19 @@


import io
import os
import uuid
import zipfile
from datetime import datetime

import pytz
from django.conf import settings
from django.utils import timezone
from django_q.tasks import AsyncTask, schedule

import api.util as util
from api.models.long_running_job import LongRunningJob


def create_download_job(job_type, user, photos, filename):
    job_id = uuid.uuid4()
    lrj = LongRunningJob.objects.create(
        started_by=user,
@@ -19,54 +22,68 @@ def create_download_job(job_type, user,photos,filename):
        job_type=job_type,
    )
    if job_type == LongRunningJob.JOB_DOWNLOAD_PHOTOS:
        AsyncTask(
            zip_photos_task, job_id=job_id, user=user, photos=photos, filename=filename
        ).run()

    lrj.save()
    return job_id


def zip_photos_task(job_id, user, photos, filename):
    lrj = LongRunningJob.objects.get(job_id=job_id)
    lrj.started_at = datetime.now().replace(tzinfo=pytz.utc)
    count = len(photos)
    lrj.result = {"progress": {"current": 0, "target": count}}
    lrj.save()
    output_directory = os.path.join(settings.MEDIA_ROOT, "zip")
    zip_file_name = filename
    done_count = 0
    try:
        if not os.path.exists(output_directory):
            os.mkdir(output_directory)
        mf = io.BytesIO()
        photos_name = {}

        for photo in photos.values():
            done_count = done_count + 1
            photo_name = os.path.basename(photo.main_file.path)
            if photo_name in photos_name:
                photos_name[photo_name] = photos_name[photo_name] + 1
                photo_name = str(photos_name[photo_name]) + "-" + photo_name
            else:
                photos_name[photo_name] = 1
            with zipfile.ZipFile(mf, mode="a", compression=zipfile.ZIP_DEFLATED) as zf:
                zf.write(photo.main_file.path, arcname=photo_name)
            lrj.result = {"progress": {"current": done_count, "target": count}}
            lrj.save()
        with open(os.path.join(output_directory, zip_file_name), "wb") as output_file:
            output_file.write(mf.getvalue())

    except Exception as e:
        util.logger.error("Error while converting files to zip: {}".format(e))

    lrj.finished_at = datetime.now().replace(tzinfo=pytz.utc)
    lrj.finished = True
    lrj.save()
    # scheduling a task to delete the zip file after a day
    execution_time = timezone.now() + timezone.timedelta(days=1)
    schedule("api.all_tasks.delete_zip_file", filename, next_run=execution_time)
    return os.path.join(output_directory, zip_file_name)


def delete_zip_file(filename):
    file_path = os.path.join(settings.MEDIA_ROOT, "zip", filename)
    try:
        if not os.path.exists(file_path):
            util.logger.error(
                "Error while deleting file, not found at: {}".format(file_path)
            )
            return
        else:
            os.remove(file_path)
            util.logger.info("file deleted successfully at path: {}".format(file_path))

    except Exception as e:
        util.logger.error("Error while deleting file: {}".format(e))
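
Taken together, api/all_tasks.py now implements a queue-and-poll download pipeline: create_download_job records a LongRunningJob and hands the heavy work to a django-q AsyncTask, and zip_photos_task schedules delete_zip_file to run a day later. A minimal sketch of how a caller might drive it, assuming an existing user with owned photos (the lookups below are illustrative, not part of this commit):

# Hypothetical driver for the download pipeline above.
from api.all_tasks import create_download_job
from api.models import Photo, User
from api.models.long_running_job import LongRunningJob

user = User.objects.first()  # assumption: some existing user
# zip_photos_task iterates photos.values(), so pass the dict from in_bulk()
photos = Photo.objects.filter(owner=user).in_bulk()
job_id = create_download_job(
    LongRunningJob.JOB_DOWNLOAD_PHOTOS,
    user=user,
    photos=photos,
    filename="example.zip",
)
# A django-q worker executes zip_photos_task; progress lands on the job row:
progress = LongRunningJob.objects.get(job_id=job_id).result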
5 changes: 2 additions & 3 deletions api/models/long_running_job.py
@@ -18,7 +18,7 @@ class LongRunningJob(models.Model):
    JOB_CALCULATE_CLIP_EMBEDDINGS = 6
    JOB_SCAN_FACES = 7
    JOB_CLUSTER_ALL_FACES = 8
    JOB_DOWNLOAD_PHOTOS = 9

    JOB_TYPES = (
        (JOB_SCAN_PHOTOS, "Scan Photos"),
@@ -29,8 +29,7 @@ class LongRunningJob(models.Model):
        (JOB_SCAN_FACES, "Scan Faces"),
        (JOB_CALCULATE_CLIP_EMBEDDINGS, "Calculate Clip Embeddings"),
        (JOB_CLUSTER_ALL_FACES, "Find Similar Faces"),
        (JOB_DOWNLOAD_PHOTOS, "Download Selected Photos"),
    )

    job_type = models.PositiveIntegerField(
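
Since JOB_DOWNLOAD_PHOTOS is now paired with a label in JOB_TYPES, Django's choices machinery exposes the human-readable name for free — a small sketch, assuming job_type is declared with choices=JOB_TYPES as in the surrounding model:

# Hypothetical lookup of the new job type's display label.
from api.models.long_running_job import LongRunningJob

job = LongRunningJob(job_type=LongRunningJob.JOB_DOWNLOAD_PHOTOS)
print(job.get_job_type_display())  # "Download Selected Photos"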
101 changes: 59 additions & 42 deletions api/views/views.py
@@ -1,19 +1,17 @@
import io
import os
import secrets
import subprocess
import time
import uuid
import zipfile
from urllib.parse import quote

import jsonschema
import magic
from constance import config as site_config
from django.conf import settings
from django.db.models import Q, Sum
from django.http import HttpResponse, HttpResponseForbidden, StreamingHttpResponse
from django.utils.decorators import method_decorator
from django.utils.encoding import iri_to_uri
@@ -26,12 +24,13 @@
from rest_framework.views import APIView, exception_handler
from rest_framework_simplejwt.exceptions import TokenError
from rest_framework_simplejwt.tokens import AccessToken

from api.all_tasks import create_download_job
from api.api_util import get_search_term_examples
from api.autoalbum import delete_missing_photos
from api.directory_watcher import scan_photos
from api.models import AlbumUser, Photo, User
from api.models.long_running_job import LongRunningJob
from api.schemas.site_settings import site_settings_schema
from api.serializers.album_user import AlbumUserEditSerializer, AlbumUserListSerializer
from api.util import logger
@@ -364,11 +363,13 @@ def _generate_response(self, photo, path, fname, transcode_videos):
                path, fname + ".mp4"
            )
            return response

        if "faces" in path:
            response = HttpResponse()
            response["Content-Type"] = "image/jpg"
            response["X-Accel-Redirect"] = self._get_protected_media_url(path, fname)
            return response

        if photo.video:
            # This is probably very slow -> Save the mime type when scanning
            mime = magic.Magic(mime=True)
@@ -421,6 +422,25 @@ def _generate_response(self, photo, path, fname, transcode_videos):
        ],
    )
    def get(self, request, path, fname, format=None):
        if path.lower() == "zip":
            jwt = request.COOKIES.get("jwt")
            if jwt is not None:
                try:
                    token = AccessToken(jwt)
                except TokenError:
                    return HttpResponseForbidden()
            else:
                return HttpResponseForbidden()
            try:
                response = HttpResponse()
                response["Content-Type"] = "application/x-zip-compressed"
                response["X-Accel-Redirect"] = self._get_protected_media_url(
                    path, fname
                )
                return response
            except Exception:
                return HttpResponse(status=404)

        if path.lower() == "avatars":
            jwt = request.COOKIES.get("jwt")
            if jwt is not None:
@@ -574,7 +594,6 @@ def post(self, request, format=None):
                return
            mf = io.BytesIO()
            photos_name = {}
            for photo in photos.values():
                photo_name = os.path.basename(photo.main_file.path)
                if photo_name in photos_name:
@@ -592,51 +611,49 @@ def post(self, request, format=None):
        except BaseException as e:
            logger.error(str(e))
            return HttpResponse(status=404)



class ZipListPhotosView_V2(APIView):
    def post(self, request):
        import shutil

        free_storage = shutil.disk_usage("/").free
        data = dict(request.data)
        if "image_hashes" not in data:
            return
        photo_query = Photo.objects.filter(owner=self.request.user)
        photos = photo_query.in_bulk(data["image_hashes"])
        if len(photos) == 0:
            return
        total_file_size = photo_query.aggregate(Sum("size"))["size__sum"] or None
        if free_storage < total_file_size:
            return Response(data={"status": "Insufficient Storage"}, status=507)

        filename = str(secrets.token_hex(nbytes=16) + ".zip")
        user = self.request.user
        job_id = create_download_job(
            LongRunningJob.JOB_DOWNLOAD_PHOTOS,
            user=user,
            photos=photos,
            filename=filename,
        )
        response = {"job_id": job_id, "url": filename}

        return Response(data=response, status=200)

    def get(self, request):
        try:
            job = LongRunningJob.objects.get(job_id=request.data["job_id"])
            if job.finished:
                return Response(data={"status": "SUCCESS"}, status=200)
            elif job.failed:
                return Response(
                    data={"status": "FAILURE", "result": job.result}, status=500
                )
            else:
                return Response(
                    data={"status": "PENDING", "progress": job.result}, status=202
                )
        except BaseException as e:
            logger.error(str(e))
            return Response(status=404)
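
End to end, the client flow is: POST the selected image hashes to the V2 endpoint, poll its GET until the job leaves the PENDING state, then fetch the archive from the protected zip path, which the new "zip" branch above serves via X-Accel-Redirect. A rough sketch using requests — the URL paths here are assumptions inferred from this diff, not confirmed routes:

# Hypothetical client for the zip-download flow; endpoint paths are guesses.
import time

import requests

BASE = "https://photos.example.com"  # assumption
session = requests.Session()  # assumed to already carry the jwt cookie

resp = session.post(f"{BASE}/api/photos/download", json={"image_hashes": ["h1", "h2"]})
job_id, url = resp.json()["job_id"], resp.json()["url"]

while True:
    status = session.get(f"{BASE}/api/photos/download", json={"job_id": job_id, "url": url})
    if status.status_code != 202:  # 202 = PENDING in the view above
        break
    time.sleep(2)

if status.status_code == 200:
    zip_bytes = session.get(f"{BASE}/media/zip/{url}").content  # served via X-Accel-Redirect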
