AOI, tasks, and project non-geometry exports and cleanups #6655

Merged · 1 commit · Dec 11, 2024
39 changes: 25 additions & 14 deletions backend/api/projects/resources.py
@@ -6,7 +6,7 @@
import geojson
from databases import Database
from fastapi import APIRouter, Depends, Request
from fastapi.responses import FileResponse, JSONResponse
from fastapi.responses import FileResponse, JSONResponse, StreamingResponse
from loguru import logger

from backend.db import get_db
@@ -1043,13 +1043,20 @@ async def get(request: Request, project_id: int, db: Database = Depends(get_db))
project_dto = await ProjectService.get_project_dto_for_mapper(
project_id, None, db, locale, True
)
# TODO Send file.
# Handle file download if requested
if as_file:
return send_file(
io.BytesIO(geojson.dumps(project_dto).encode("utf-8")),
mimetype="application/json",
as_attachment=True,
download_name=f"project_{str(project_id)}.json",
project_dto_str = geojson.dumps(
project_dto, indent=4
) # Convert to GeoJSON string
file_bytes = io.BytesIO(project_dto_str.encode("utf-8"))
file_bytes.seek(0) # Reset stream position

return StreamingResponse(
file_bytes,
media_type="application/geo+json",
headers={
"Content-Disposition": f'attachment; filename="project_{project_id}.geojson"'
},
)

return project_dto
@@ -1158,15 +1165,19 @@ async def get(request: Request, project_id: int, db: Database = Depends(get_db))
)

project_aoi = await ProjectService.get_project_aoi(project_id, db)
# TODO as file.

if as_file:
return send_file(
io.BytesIO(geojson.dumps(project_aoi).encode("utf-8")),
mimetype="application/json",
as_attachment=True,
download_name=f"{str(project_id)}.geojson",
)
aoi_str = geojson.dumps(project_aoi, indent=4) # Convert AOI to GeoJSON string
file_bytes = io.BytesIO(aoi_str.encode("utf-8"))
file_bytes.seek(0) # Reset stream position

return StreamingResponse(
file_bytes,
media_type="application/geo+json",
headers={
"Content-Disposition": f'attachment; filename="{project_id}.geojson"'
},
)
return project_aoi


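For context on the pattern these hunks adopt: Flask's send_file has no FastAPI counterpart, and FastAPI's FileResponse expects a file on disk, whereas StreamingResponse can serve an in-memory buffer. Below is a minimal, self-contained sketch of the same download flow; the route path and the hard-coded AOI are illustrative stand-ins, not the Tasking Manager's actual endpoint or data.

# Sketch only: demonstrates the StreamingResponse download pattern used in this PR.
# The /demo route and the hard-coded polygon are illustrative, not real project code.
import io

import geojson
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()


@app.get("/demo/projects/{project_id}/aoi")
async def download_aoi(project_id: int, as_file: bool = False):
    # Stand-in for ProjectService.get_project_aoi(); a real handler queries the DB.
    aoi = geojson.Polygon([[(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]])

    if not as_file:
        return aoi  # plain JSON body, same as returning project_aoi above

    body = io.BytesIO(geojson.dumps(aoi, indent=4).encode("utf-8"))
    body.seek(0)  # reset stream position before handing the buffer to the response
    return StreamingResponse(
        body,
        media_type="application/geo+json",
        headers={
            "Content-Disposition": f'attachment; filename="{project_id}.geojson"'
        },
    )

Served this way, nothing is written to disk and the client still receives an ordinary file download.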
11 changes: 6 additions & 5 deletions backend/api/tasks/resources.py
@@ -121,18 +121,19 @@ async def get(request: Request, project_id: int, db: Database = Depends(get_db))

tasks_json = await ProjectService.get_project_tasks(db, int(project_id), tasks)
if as_file:
tasks_json = json.dumps(tasks_json, indent=4) # Pretty-printed JSON
file_bytes = io.BytesIO(tasks_json.encode("utf-8"))
tasks_str = json.dumps(tasks_json, indent=4) # Pretty-printed GeoJSON
file_bytes = io.BytesIO(tasks_str.encode("utf-8"))
file_bytes.seek(0) # Reset stream position

# Return the file response for download
# Return the GeoJSON file response for download
return StreamingResponse(
file_bytes,
media_type="application/json",
media_type="application/geo+json",
headers={
"Content-Disposition": f'attachment; filename="{project_id}-tasks.json"'
"Content-Disposition": f'attachment; filename="{project_id}-tasks.geojson"'
},
)

return tasks_json
except ProjectServiceError as e:
return JSONResponse(content={"Error": str(e)}, status_code=403)
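A quick way to exercise the new tasks export from the client side is sketched below. The base URL, route, and as_file query-parameter name are assumptions for illustration; only the media type and filename follow from the handler above.

# Hypothetical smoke test of the tasks export; base URL and route are assumed.
import httpx

BASE_URL = "http://localhost:8000"  # assumed local dev server

with httpx.Client(base_url=BASE_URL) as client:
    resp = client.get("/api/v2/projects/123/tasks/", params={"as_file": "true"})
    resp.raise_for_status()

    # The handler sets a GeoJSON media type and an attachment filename.
    assert resp.headers["content-type"].startswith("application/geo+json")
    assert "123-tasks.geojson" in resp.headers.get("content-disposition", "")

    with open("123-tasks.geojson", "wb") as fh:
        fh.write(resp.content)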
117 changes: 0 additions & 117 deletions backend/models/postgis/task.py
@@ -476,48 +476,6 @@ async def remove_duplicate_task_history_rows(

await db.execute(query=duplicate_query, values=values)

# @staticmethod
# async def update_expired_and_locked_actions(
# project_id: int, task_id: int, expiry_date: datetime, action_text: str, session
# ):
# """
# Sets auto unlock state to all not finished actions, that are older then the expiry date.
# Action is considered as a not finished, when it is in locked state and doesn't have action text
# :param project_id: Project ID in scope
# :param task_id: Task in scope
# :param expiry_date: Action created before this date is treated as expired
# :param action_text: Text which will be set for all changed actions
# :return:
# """
# result = await session.execute(
# select(TaskHistory).filter(
# TaskHistory.task_id == task_id,
# TaskHistory.project_id == project_id,
# TaskHistory.action_text.is_(None),
# TaskHistory.action.in_(
# [
# TaskAction.LOCKED_FOR_VALIDATION.name,
# TaskAction.LOCKED_FOR_MAPPING.name,
# TaskAction.EXTENDED_FOR_MAPPING.name,
# TaskAction.EXTENDED_FOR_VALIDATION.name,
# ]
# ),
# TaskHistory.action_date <= expiry_date,
# )
# )
# all_expired = result.scalars().all()
# for task_history in all_expired:
# unlock_action = (
# TaskAction.AUTO_UNLOCKED_FOR_MAPPING
# if task_history.action in ["LOCKED_FOR_MAPPING", "EXTENDED_FOR_MAPPING"]
# else TaskAction.AUTO_UNLOCKED_FOR_VALIDATION
# )

# task_history.set_auto_unlock_action(unlock_action)
# task_history.action_text = action_text

# await session.commit()

@staticmethod
async def update_expired_and_locked_actions(
task_id: int,
@@ -663,25 +621,6 @@ async def get_last_locked_action(project_id: int, task_id: int, db: Database):
db,
)

# @staticmethod
# async def get_last_locked_or_auto_unlocked_action(
# project_id: int, task_id: int, session
# ):
# """Gets the most recent task history record with locked or auto unlocked action for the task"""

# result = await TaskHistory.get_last_action_of_type(
# project_id,
# task_id,
# [
# TaskAction.LOCKED_FOR_MAPPING.name,
# TaskAction.LOCKED_FOR_VALIDATION.name,
# TaskAction.AUTO_UNLOCKED_FOR_MAPPING.name,
# TaskAction.AUTO_UNLOCKED_FOR_VALIDATION.name,
# ],
# session,
# )
# return result

@staticmethod
async def get_last_locked_or_auto_unlocked_action(
task_id: int, project_id: int, db: Database
@@ -913,48 +852,6 @@ async def get_tasks_by_status(project_id: int, status: str, db: Database):
async def auto_unlock_delta():
return parse_duration(settings.TASK_AUTOUNLOCK_AFTER)

# @staticmethod
# async def auto_unlock_tasks(project_id: int, db: Database):
# """Unlock all tasks locked for longer than the auto-unlock delta"""
# expiry_delta = await Task.auto_unlock_delta()
# lock_duration = (datetime.datetime.min + expiry_delta).time().isoformat()

# expiry_date = datetime.datetime.utcnow() - expiry_delta

# old_tasks = await db.execute(
# select(Task.id)
# .join(
# TaskHistory,
# (Task.id == TaskHistory.task_id)
# & (Task.project_id == TaskHistory.project_id),
# )
# .filter(Task.task_status.in_([1, 3]))
# .filter(
# TaskHistory.action.in_(
# [
# "EXTENDED_FOR_MAPPING",
# "EXTENDED_FOR_VALIDATION",
# "LOCKED_FOR_VALIDATION",
# "LOCKED_FOR_MAPPING",
# ]
# )
# )
# .filter(TaskHistory.action_text.is_(None))
# .filter(Task.project_id == project_id)
# .filter(TaskHistory.action_date <= expiry_date)
# )
# old_tasks = old_tasks.scalars().all()
# if not old_tasks:
# # no tasks older than the delta found, return without further processing
# return

# for old_task_id in old_tasks:
# task = await db.get(Task, (old_task_id, project_id))
# if task:
# await Task.auto_unlock_expired_tasks(
# expiry_date, lock_duration, db
# )

@staticmethod
async def auto_unlock_tasks(project_id: int, db: Database):
"""Unlock all tasks locked for longer than the auto-unlock delta."""
@@ -992,20 +889,6 @@ async def auto_unlock_tasks(project_id: int, db: Database):
for task_id in old_task_ids:
await Task.auto_unlock_expired_tasks(task_id, project_id, expiry_date, db)

# async def auto_unlock_expired_tasks(self, expiry_date, lock_duration, session):
# """Unlock all tasks locked before expiry date. Clears task lock if needed"""
# await TaskHistory.update_expired_and_locked_actions(
# self.project_id, self.id, expiry_date, lock_duration, session
# )
# last_action = await TaskHistory.get_last_locked_or_auto_unlocked_action(
# self.project_id, self.id, session
# )
# if last_action.action in [
# "AUTO_UNLOCKED_FOR_MAPPING",
# "AUTO_UNLOCKED_FOR_VALIDATION",
# ]:
# self.clear_lock()

@staticmethod
async def auto_unlock_expired_tasks(
task_id: int, project_id: int, expiry_date: datetime, db: Database
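The visible part of the refactored auto_unlock_tasks ends with per-task calls to auto_unlock_expired_tasks. For orientation, a rough sketch of how the expiry cutoff it compares against could be derived; the two-hour delta is a stand-in for whatever parse_duration(settings.TASK_AUTOUNLOCK_AFTER) returns.

# Sketch of the auto-unlock cutoff; the hard-coded delta replaces the configured setting.
from datetime import datetime, timedelta

expiry_delta = timedelta(hours=2)  # stand-in for parse_duration(settings.TASK_AUTOUNLOCK_AFTER)
expiry_date = datetime.utcnow() - expiry_delta

# Tasks still locked whose last locking action is older than expiry_date are the
# candidates for AUTO_UNLOCKED_FOR_MAPPING / AUTO_UNLOCKED_FOR_VALIDATION.
print(f"auto-unlocking tasks locked before {expiry_date.isoformat()}")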