diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000000..5267c922c87 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,66 @@ +name: Docs +on: + push: + branches: + - 'master' + - 'develop' + pull_request: + types: [ready_for_review, opened, synchronize, reopened] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + generate_github_pages: + permissions: + contents: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + + - name: Generate CVAT SDK + run: | + pip3 install --user -r cvat-sdk/gen/requirements.txt + ./cvat-sdk/gen/generate.sh + + - name: Setup Hugo + run: | + wget https://github.com/gohugoio/hugo/releases/download/v0.110.0/hugo_extended_0.110.0_Linux-64bit.tar.gz + (mkdir hugo_extended_0.110.0_Linux-64bit && tar -xf hugo_extended_0.110.0_Linux-64bit.tar.gz -C hugo_extended_0.110.0_Linux-64bit) + + wget https://github.com/gohugoio/hugo/releases/download/v0.83.0/hugo_extended_0.83.0_Linux-64bit.tar.gz + (mkdir hugo_extended_0.83.0_Linux-64bit && tar -xf hugo_extended_0.83.0_Linux-64bit.tar.gz -C hugo_extended_0.83.0_Linux-64bit) + + mkdir hugo + cp hugo_extended_0.110.0_Linux-64bit/hugo hugo/hugo-0.110 + cp hugo_extended_0.83.0_Linux-64bit/hugo hugo/hugo-0.83 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: '18.x' + + - name: Install npm packages + working-directory: ./site + run: | + npm ci + + - name: Build docs + run: | + pip install -r site/requirements.txt + python site/process_sdk_docs.py + PATH="$PWD/hugo:$PATH" python site/build_docs.py + env: + HUGO_ENV: production + + - name: Deploy + if: github.ref == 'refs/heads/develop' + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./public + force_orphan: true diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 477968fd6dc..583657e0273 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -385,62 +385,9 @@ jobs: name: cypress_screenshots_${{ matrix.specs }} path: ${{ github.workspace }}/tests/cypress/screenshots - generate_github_pages: - needs: [rest_api_testing, unit_testing, e2e_testing] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - - name: Download CVAT SDK - uses: actions/download-artifact@v3 - with: - name: cvat_sdk - path: /tmp/cvat_sdk/ - - - name: Setup Hugo - run: | - wget https://github.com/gohugoio/hugo/releases/download/v0.110.0/hugo_extended_0.110.0_Linux-64bit.tar.gz - (mkdir hugo_extended_0.110.0_Linux-64bit && tar -xf hugo_extended_0.110.0_Linux-64bit.tar.gz -C hugo_extended_0.110.0_Linux-64bit) - - wget https://github.com/gohugoio/hugo/releases/download/v0.83.0/hugo_extended_0.83.0_Linux-64bit.tar.gz - (mkdir hugo_extended_0.83.0_Linux-64bit && tar -xf hugo_extended_0.83.0_Linux-64bit.tar.gz -C hugo_extended_0.83.0_Linux-64bit) - - mkdir hugo - cp hugo_extended_0.110.0_Linux-64bit/hugo hugo/hugo-0.110 - cp hugo_extended_0.83.0_Linux-64bit/hugo hugo/hugo-0.83 - - - name: Setup Node - uses: actions/setup-node@v4 - with: - node-version: '18.x' - - - name: Install npm packages - working-directory: ./site - run: | - npm ci - - - name: Build docs - run: | - pip install -r site/requirements.txt - python site/process_sdk_docs.py --input-dir /tmp/cvat_sdk/docs/ --site-root site/ - PATH="$PWD/hugo:$PATH" python site/build_docs.py - env: - HUGO_ENV: 
production - - - name: Deploy - if: github.ref == 'refs/heads/develop' - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./public - force_orphan: true - publish_dev_images: if: github.ref == 'refs/heads/develop' - needs: [rest_api_testing, unit_testing, e2e_testing, generate_github_pages] + needs: [rest_api_testing, unit_testing, e2e_testing] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/changelog.d/20240312_163919_roman_throttling_quality_reports.md b/changelog.d/20240312_163919_roman_throttling_quality_reports.md new file mode 100644 index 00000000000..40667c80c17 --- /dev/null +++ b/changelog.d/20240312_163919_roman_throttling_quality_reports.md @@ -0,0 +1,4 @@ +### Fixed + +- Made quality report update job scheduling more efficient + () diff --git a/changelog.d/20240315_183623_avaicode.md b/changelog.d/20240315_183623_avaicode.md new file mode 100644 index 00000000000..5d269795318 --- /dev/null +++ b/changelog.d/20240315_183623_avaicode.md @@ -0,0 +1,5 @@ +### Fixed + +- Changed interpolation behavior in `annotation.py`; it now correctly keeps the last frame +- Insert the last frame if it is a keyframe of the track; this fixes data corruption when a track crosses more than one job + () diff --git a/changelog.d/20240320_125513_klakhov_event_for_logged_out_user.md b/changelog.d/20240320_125513_klakhov_event_for_logged_out_user.md new file mode 100644 index 00000000000..8fec0155e89 --- /dev/null +++ b/changelog.d/20240320_125513_klakhov_event_for_logged_out_user.md @@ -0,0 +1,3 @@ +### Fixed + +- Sending `/events` request from logged-out user () diff --git a/cvat-core/package.json b/cvat-core/package.json index 5e57ad15cc6..ab537343f24 100644 --- a/cvat-core/package.json +++ b/cvat-core/package.json @@ -1,6 +1,6 @@ { "name": "cvat-core", - "version": "15.0.1", + "version": "15.0.2", "type": "module", "description": "Part of Computer Vision Tool which presents an interface for client-side integration", "main": "src/api.ts", diff --git a/cvat-core/src/index.ts b/cvat-core/src/index.ts index e4bdde9ba09..2843767e254 100644 --- a/cvat-core/src/index.ts +++ b/cvat-core/src/index.ts @@ -160,7 +160,7 @@ export default interface CVATCore { enabled: boolean; onEmptyMaskOccurrence: () => void | null; }; - onOrganizationChange: typeof config.onOrganizationChange; + onOrganizationChange: (newOrgId: number | null) => void | null; globalObjectsCounter: typeof config.globalObjectsCounter; }, client: { diff --git a/cvat-core/src/logger.ts b/cvat-core/src/logger.ts index d13b1d7a6fe..1e8fdb17421 100644 --- a/cvat-core/src/logger.ts +++ b/cvat-core/src/logger.ts @@ -225,6 +225,8 @@ Object.defineProperties(Logger.prototype.save, { // potentially new events may be generated during saving // that is why we add this.collection this.collection = [...collectionToSend, ...this.collection]; + + throw error; } finally { this.saving = false; } diff --git a/cvat-ui/src/components/cvat-app.tsx b/cvat-ui/src/components/cvat-app.tsx index 9c60ae5adf8..22898f153b6 100644 --- a/cvat-ui/src/components/cvat-app.tsx +++ b/cvat-ui/src/components/cvat-app.tsx @@ -1,5 +1,5 @@ // Copyright (C) 2020-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -71,7 +71,7 @@ import showPlatformNotification, { } from 'utils/platform-checker'; import '../styles.scss'; import appConfig from 'config'; -import EventRecorder from 'utils/controls-logger'; +import EventRecorder from
'utils/event-recorder'; import { authQuery } from 'utils/auth-query'; import EmailConfirmationPage from './email-confirmation-pages/email-confirmed'; import EmailVerificationSentPage from './email-confirmation-pages/email-verification-sent'; @@ -149,7 +149,6 @@ class CVATApplication extends React.PureComponent window.document.hasFocus, userActivityCallback); - EventRecorder.initSave(); core.config.onOrganizationChange = (newOrgId: number | null) => { if (newOrgId === null) { @@ -254,7 +253,7 @@ class CVATApplication extends React.PureComponent { - core.logger.save().finally(() => { + const scheduleSave = (): void => { this.#savingTimeout = null; this.initSave(); - }); + }; + core.logger.save() + .then(scheduleSave) + .catch((error) => { + if (error?.code === 401) { + this.cancelSave(); + } else { + scheduleSave(); + } + }); }, CONTROLS_LOGS_INTERVAL); } + public cancelSave(): void { + if (this.#savingTimeout) { + window.clearTimeout(this.#savingTimeout); + this.#savingTimeout = null; + } + } + private filterClassName(cls: string): string { if (typeof cls === 'string') { return cls.split(' ').filter((_cls: string) => _cls.startsWith('cvat')).join(' '); diff --git a/cvat/apps/analytics_report/report/create.py b/cvat/apps/analytics_report/report/create.py index fdf44e6d666..738cd9cfc06 100644 --- a/cvat/apps/analytics_report/report/create.py +++ b/cvat/apps/analytics_report/report/create.py @@ -33,6 +33,7 @@ JobObjects, ) from cvat.apps.engine.models import Job, Project, Task +from cvat.utils.background_jobs import schedule_job_with_throttling def get_empty_report(): @@ -75,12 +76,6 @@ def _make_queue_job_id_base(self, obj) -> str: def _make_custom_analytics_check_job_id(self) -> str: return uuid4().hex - def _make_queue_job_id(self, obj, start_time: timezone.datetime) -> str: - return f"{self._make_queue_job_id_base(obj)}-{start_time.timestamp()}" - - def _make_autoupdate_blocker_key(self, obj) -> str: - return f"cvat:analytics:autoupdate-blocker:{self._make_queue_job_id_base(obj)}" - @classmethod def _get_last_report_time(cls, obj): try: @@ -121,27 +116,14 @@ def schedule_analytics_report_autoupdate_job(self, *, job=None, task=None, proje target_obj = project cvat_project_id = project.id - with django_rq.get_connection(settings.CVAT_QUEUES.ANALYTICS_REPORTS.value) as connection: - # The blocker key is used to avoid scheduling a report update job - # for every single change. The first time this method is called - # for a given object, we schedule the job and create a blocker - # that expires at the same time as the job is supposed to start. - # Until the blocker expires, we don't schedule any more jobs. 
- blocker_key = self._make_autoupdate_blocker_key(target_obj) - if connection.exists(blocker_key): - return - - queue_job_id = self._make_queue_job_id(target_obj, next_job_time) - - self._get_scheduler().enqueue_at( - next_job_time, - self._check_analytics_report, - cvat_task_id=cvat_task_id, - cvat_project_id=cvat_project_id, - job_id=queue_job_id, - ) - - connection.set(blocker_key, queue_job_id, exat=next_job_time) + schedule_job_with_throttling( + settings.CVAT_QUEUES.ANALYTICS_REPORTS.value, + self._make_queue_job_id_base(target_obj), + next_job_time, + self._check_analytics_report, + cvat_task_id=cvat_task_id, + cvat_project_id=cvat_project_id, + ) def schedule_analytics_check_job(self, *, job=None, task=None, project=None, user_id): rq_id = self._make_custom_analytics_check_job_id() diff --git a/cvat/apps/analytics_report/views.py b/cvat/apps/analytics_report/views.py index 4317915a0db..4723a705cf2 100644 --- a/cvat/apps/analytics_report/views.py +++ b/cvat/apps/analytics_report/views.py @@ -29,7 +29,7 @@ def get_queryset(self): @extend_schema( operation_id="analytics_create_report", - summary="Creates a analytics report asynchronously and allows to check request status", + summary="Create an analytics report", parameters=[ OpenApiParameter( "rq_id", @@ -144,7 +144,7 @@ def create(self, request, *args, **kwargs): return Response(serializer.data, status=status.HTTP_201_CREATED) @extend_schema( - summary="Method returns analytics report", + summary="Get an analytics report", methods=["GET"], operation_id="analytics_get_reports", description="Receive analytics report", diff --git a/cvat/apps/dataset_manager/annotation.py b/cvat/apps/dataset_manager/annotation.py index 2cb8a5a2f8b..75262299929 100644 --- a/cvat/apps/dataset_manager/annotation.py +++ b/cvat/apps/dataset_manager/annotation.py @@ -107,13 +107,16 @@ def filter_track_shapes(shapes): if len(segment_shapes) < len(track['shapes']): interpolated_shapes = TrackManager.get_interpolated_shapes( - track, start, stop, dimension) + track, start, stop + 1, dimension) scoped_shapes = filter_track_shapes(interpolated_shapes) if scoped_shapes: + last_key = sorted(track['shapes'], key=lambda s: s['frame'])[-1]['frame'] if not scoped_shapes[0]['keyframe']: segment_shapes.insert(0, scoped_shapes[0]) - if scoped_shapes[-1]['keyframe'] and \ + if last_key >= stop and scoped_shapes[-1]['points'] != segment_shapes[-1]['points']: + segment_shapes.append(scoped_shapes[-1]) + elif scoped_shapes[-1]['keyframe'] and \ scoped_shapes[-1]['outside']: segment_shapes.append(scoped_shapes[-1]) elif stop + 1 < len(interpolated_shapes) and \ diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py index bf65fdb1340..53867cbe890 100644 --- a/cvat/apps/engine/views.py +++ b/cvat/apps/engine/views.py @@ -103,7 +103,7 @@ def get_serializer(self, *args, **kwargs): pass @staticmethod - @extend_schema(summary='Method provides basic CVAT information', + @extend_schema(summary='Get basic CVAT information', responses={ '200': AboutSerializer, }) @@ -129,7 +129,7 @@ def about(request): @staticmethod @extend_schema( - summary='Returns all files and folders that are on the server along specified path', + summary='List files/directories in the mounted share', parameters=[ OpenApiParameter('directory', description='Directory to browse', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR), @@ -180,7 +180,7 @@ def share(request): @staticmethod @extend_schema( - summary='Method provides the list of supported annotations formats', + summary='Get supported 
annotation formats', responses={ '200': DatasetFormatsSerializer, }) @@ -191,7 +191,7 @@ def annotation_formats(request): @staticmethod @extend_schema( - summary='Method provides allowed plugins', + summary='Get enabled plugins', responses={ '200': PluginsSerializer, }) @@ -208,29 +208,29 @@ def plugins(request): @extend_schema(tags=['projects']) @extend_schema_view( list=extend_schema( - summary='Returns a paginated list of projects', + summary='List projects', responses={ '200': ProjectReadSerializer(many=True), }), create=extend_schema( - summary='Method creates a new project', + summary='Create a project', request=ProjectWriteSerializer, parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ '201': ProjectReadSerializer, # check ProjectWriteSerializer.to_representation }), retrieve=extend_schema( - summary='Method returns details of a specific project', + summary='Get project details', responses={ '200': ProjectReadSerializer, }), destroy=extend_schema( - summary='Method deletes a specific project', + summary='Delete a project', responses={ '204': OpenApiResponse(description='The project has been deleted'), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a project', + summary='Update a project', request=ProjectWriteSerializer(partial=True), responses={ '200': ProjectReadSerializer, # check ProjectWriteSerializer.to_representation @@ -279,7 +279,7 @@ def perform_create(self, serializer, **kwargs): # Required for the extra summary information added in the queryset serializer.instance = self.get_queryset().get(pk=serializer.instance.pk) - @extend_schema(methods=['GET'], summary='Export project as a dataset in a specific format', + @extend_schema(methods=['GET'], summary='Export a project as a dataset / Check dataset import status', description=textwrap.dedent(""" To check the status of the process of importing a project dataset from a file: @@ -314,7 +314,7 @@ def perform_create(self, serializer, **kwargs): '405': OpenApiResponse(description='Format is not available'), }) @extend_schema(methods=['POST'], - summary='Import dataset in specific format as a project or check status of dataset import process', + summary='Import a dataset into a project', description=textwrap.dedent(""" The request POST /api/projects/id/dataset will initiate file upload and will create the rq job on the server in which the process of dataset import from a file @@ -448,7 +448,7 @@ def upload_finished(self, request): return Response(data='Unknown upload was finished', status=status.HTTP_400_BAD_REQUEST) - @extend_schema(summary='Method allows to download project annotations', + @extend_schema(summary='Get project annotations', parameters=[ OpenApiParameter('format', description='Desired output format name\n' 'You can get the list of supported formats at:\n/server/annotation/formats', @@ -489,7 +489,7 @@ def annotations(self, request, pk): get_data=dm.task.get_job_data, ) - @extend_schema(summary='Methods creates a backup copy of a project', + @extend_schema(summary='Back up a project', parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, description='Used to start downloading process after backup file had been created', @@ -514,7 +514,7 @@ def annotations(self, request, pk): def export_backup(self, request, pk=None): return self.serialize(request, backup.export) - @extend_schema(methods=['POST'], summary='Methods create a project from a backup', + @extend_schema(methods=['POST'], summary='Recreate a project from a backup', 
description=textwrap.dedent(""" The backup import process is as follows: @@ -562,7 +562,7 @@ def import_backup(self, request, pk=None): def append_backup_chunk(self, request, file_id): return self.append_tus_chunk(request, file_id) - @extend_schema(summary='Method returns a preview image for the project', + @extend_schema(summary='Get a preview image for a project', responses={ '200': OpenApiResponse(description='Project image preview'), '404': OpenApiResponse(description='Project image preview not found'), @@ -728,29 +728,34 @@ def __call__(self, request, start, stop, db_data): @extend_schema(tags=['tasks']) @extend_schema_view( list=extend_schema( - summary='Returns a paginated list of tasks', + summary='List tasks', responses={ '200': TaskReadSerializer(many=True), }), create=extend_schema( - summary='Method creates a new task in a database without any attached images and videos', + summary='Create a task', + description=textwrap.dedent("""\ + The new task will not have any attached images or videos. + To attach them, use the /api/tasks/id/data endpoint. + """), request=TaskWriteSerializer, parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ '201': TaskReadSerializer, # check TaskWriteSerializer.to_representation }), retrieve=extend_schema( - summary='Method returns details of a specific task', + summary='Get task details', responses={ '200': TaskReadSerializer }), destroy=extend_schema( - summary='Method deletes a specific task, all attached jobs, annotations, and data', + summary='Delete a task', + description='All attached jobs, annotations and data will be deleted as well.', responses={ '204': OpenApiResponse(description='The task has been deleted'), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a task', + summary='Update a task', request=TaskWriteSerializer(partial=True), responses={ '200': TaskReadSerializer, # check TaskWriteSerializer.to_representation @@ -807,7 +812,7 @@ def get_queryset(self): return queryset - @extend_schema(summary='Method recreates a task from an attached task backup file', + @extend_schema(summary='Recreate a task from a backup', description=textwrap.dedent(""" The backup import process is as follows: @@ -852,7 +857,7 @@ def import_backup(self, request, pk=None): def append_backup_chunk(self, request, file_id): return self.append_tus_chunk(request, file_id) - @extend_schema(summary='Method backup a specified task', + @extend_schema(summary='Back up a task', parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, description='Used to start downloading process after backup file had been created', @@ -1102,9 +1107,9 @@ def _handle_upload_backup(request): assert _UPLOAD_FILE_ORDER_FIELD in DataSerializer().fields @extend_schema(methods=['POST'], - summary="Method permanently attaches data (images, video, etc.) to a task", + summary="Attach data to a task", description=textwrap.dedent("""\ - Allows to upload data to a task. + Allows uploading data (images, video, etc.) to a task. Supports the TUS open file uploading protocol (https://tus.io/). Supports the following protocols: @@ -1153,6 +1158,8 @@ def _handle_upload_backup(request): After all data is sent, the operation status can be retrieved via the /status endpoint. + + Once data is attached to a task, it cannot be detached or replaced.
""".format_map( {'upload_file_order_field': _UPLOAD_FILE_ORDER_FIELD} )), @@ -1170,7 +1177,7 @@ def _handle_upload_backup(request): '202': OpenApiResponse(description=''), }) @extend_schema(methods=['GET'], - summary='Method returns data for a specific task', + summary='Get data of a task', parameters=[ OpenApiParameter('type', location=OpenApiParameter.QUERY, required=False, type=OpenApiTypes.STR, enum=['chunk', 'frame', 'context_image'], @@ -1224,7 +1231,7 @@ def append_data_chunk(self, request, pk, file_id): self._object = self.get_object() return self.append_tus_chunk(request, file_id) - @extend_schema(methods=['GET'], summary='Method allows to download task annotations', + @extend_schema(methods=['GET'], summary='Get task annotations', parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, description="Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats"), @@ -1253,13 +1260,13 @@ def append_data_chunk(self, request, pk, file_id): '400': OpenApiResponse(description='Exporting without data is not allowed'), '405': OpenApiResponse(description='Format is not available'), }) - @extend_schema(methods=['PUT'], summary='Method allows to upload task annotations or edit existing annotations', + @extend_schema(methods=['PUT'], summary='Replace task annotations / Get annotation import status', description=textwrap.dedent(""" - To check the status of the process of uploading a task annotations from a file: + To check the status of an import request: - After initiating the annotations upload, you will receive an rq_id parameter. + After initiating the annotation import, you will receive an rq_id parameter. Make sure to include this parameter as a query parameter in your subsequent - PUT /api/tasks/id/annotations requests to track the status of the annotations upload. + PUT /api/tasks/id/annotations requests to track the status of the import. """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, @@ -1273,16 +1280,16 @@ def append_data_chunk(self, request, pk, file_id): resource_type_field_name=None ), responses={ - '201': OpenApiResponse(description='Uploading has finished'), - '202': OpenApiResponse(description='Uploading has been started'), + '201': OpenApiResponse(description='Import has finished'), + '202': OpenApiResponse(description='Import is in progress'), '405': OpenApiResponse(description='Format is not available'), }) @extend_schema(methods=['POST'], - summary="Method allows to initialize the process of upload task annotations from a local or a cloud storage file", + summary="Import annotations into a task", description=textwrap.dedent(""" - The request POST /api/tasks/id/annotations will initiate file upload and will create - the rq job on the server in which the process of annotations uploading from file - will be carried out. Please, use the PUT /api/tasks/id/annotations endpoint for checking status of the process. + The request POST /api/tasks/id/annotations will initiate the import and will create + the rq job on the server in which the import will be carried out. + Please, use the PUT /api/tasks/id/annotations endpoint for checking status of the process. 
"""), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, @@ -1308,7 +1315,7 @@ def append_data_chunk(self, request, pk, file_id): '202': OpenApiResponse(RqIdSerializer, description='Uploading has been started'), '405': OpenApiResponse(description='Format is not available'), }) - @extend_schema(methods=['PATCH'], summary='Method performs a partial update of annotations in a specific task', + @extend_schema(methods=['PATCH'], summary='Update task annotations', parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, required=True, type=OpenApiTypes.STR, enum=['create', 'update', 'delete']), @@ -1317,7 +1324,7 @@ def append_data_chunk(self, request, pk, file_id): responses={ '200': LabeledDataSerializer, }) - @extend_schema(methods=['DELETE'], summary='Method deletes all annotations for a specific task', + @extend_schema(methods=['DELETE'], summary='Delete task annotations', responses={ '204': OpenApiResponse(description='The annotation has been deleted'), }) @@ -1393,7 +1400,7 @@ def append_annotations_chunk(self, request, pk, file_id): return self.append_tus_chunk(request, file_id) @extend_schema( - summary='When task is being created the method returns information about a status of the creation process', + summary='Get the creation status of a task', responses={ '200': RqStatusSerializer, }) @@ -1432,11 +1439,11 @@ def _get_rq_response(queue, job_id): return response - @extend_schema(summary='Method provides a meta information about media files which are related with the task', + @extend_schema(methods=['GET'], summary='Get metainformation for media files in a task', responses={ '200': DataMetaReadSerializer, }) - @extend_schema(methods=['PATCH'], summary='Method performs an update of data meta fields (deleted frames)', + @extend_schema(methods=['PATCH'], summary='Update metainformation for media files in a task', request=DataMetaWriteSerializer, responses={ '200': DataMetaReadSerializer, @@ -1515,7 +1522,7 @@ def dataset_export(self, request, pk): return Response(data="Exporting a dataset from a task without data is not allowed", status=status.HTTP_400_BAD_REQUEST) - @extend_schema(summary='Method returns a preview image for the task', + @extend_schema(summary='Get a preview image for a task', responses={ '200': OpenApiResponse(description='Task image preview'), '404': OpenApiResponse(description='Task image preview not found'), @@ -1541,30 +1548,32 @@ def preview(self, request, pk): @extend_schema(tags=['jobs']) @extend_schema_view( create=extend_schema( - summary='Method creates a new job in the task', + summary='Create a job', request=JobWriteSerializer, responses={ '201': JobReadSerializer, # check JobWriteSerializer.to_representation }), retrieve=extend_schema( - summary='Method returns details of a job', + summary='Get job details', responses={ '200': JobReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of jobs', + summary='List jobs', responses={ '200': JobReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a job', + summary='Update a job', request=JobWriteSerializer(partial=True), responses={ '200': JobReadSerializer, # check JobWriteSerializer.to_representation }), destroy=extend_schema( - summary='Method deletes a job and its related annotations', + summary='Delete a job', description=textwrap.dedent("""\ + Related annotations will be deleted as well. + Please note, that not every job can be removed. 
Currently, it is only available for Ground Truth jobs. """), @@ -1657,8 +1666,11 @@ def upload_finished(self, request): status=status.HTTP_400_BAD_REQUEST) @extend_schema(methods=['GET'], - summary="Method returns annotations for a specific job as a JSON document. " - "If format is specified, a zip archive is returned.", + summary="Get job annotations", + description=textwrap.dedent("""\ + If format is specified, a ZIP archive will be returned. Otherwise, + the annotations will be returned as a JSON document. + """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, description='Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats', @@ -1688,11 +1700,11 @@ def upload_finished(self, request): '405': OpenApiResponse(description='Format is not available'), }) @extend_schema(methods=['POST'], - summary='Method allows to initialize the process of the job annotation upload from a local file or a cloud storage', + summary='Import annotations into a job', description=textwrap.dedent(""" - The request POST /api/jobs/id/annotations will initiate file upload and will create - the rq job on the server in which the process of annotations uploading from file - will be carried out. Please, use the PUT /api/jobs/id/annotations endpoint for checking status of the process. + The request POST /api/jobs/id/annotations will initiate the import and will create + the rq job on the server in which the import will be carried out. + Please use the PUT /api/jobs/id/annotations endpoint to check the status of the process. """), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, @@ -1715,14 +1727,13 @@ def upload_finished(self, request): '405': OpenApiResponse(description='Format is not available'), }) @extend_schema(methods=['PUT'], - summary='Method performs an update of all annotations in a specific job ' - 'or used for uploading annotations from a file', + summary='Replace job annotations / Get annotation import status', description=textwrap.dedent(""" - To check the status of the process of uploading a job annotations from a file: + To check the status of an import request: - After initiating the annotations upload, you will receive an rq_id parameter. + After initiating the annotation import, you will receive an rq_id parameter. Make sure to include this parameter as a query parameter in your subsequent - PUT /api/jobs/id/annotations requests to track the status of the annotations upload. + PUT /api/jobs/id/annotations requests to track the status of the import.
"""), parameters=[ OpenApiParameter('format', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=False, @@ -1746,11 +1757,11 @@ def upload_finished(self, request): resource_type_field_name=None ), responses={ - '201': OpenApiResponse(description='Uploading has finished'), - '202': OpenApiResponse(description='Uploading has been started'), + '201': OpenApiResponse(description='Import has finished'), + '202': OpenApiResponse(description='Import is in progress'), '405': OpenApiResponse(description='Format is not available'), }) - @extend_schema(methods=['PATCH'], summary='Method performs a partial update of annotations in a specific job', + @extend_schema(methods=['PATCH'], summary='Update job annotations', parameters=[ OpenApiParameter('action', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, required=True, enum=['create', 'update', 'delete']) @@ -1759,7 +1770,7 @@ def upload_finished(self, request): responses={ '200': OpenApiResponse(description='Annotations successfully uploaded'), }) - @extend_schema(methods=['DELETE'], summary='Method deletes all annotations for a specific job', + @extend_schema(methods=['DELETE'], summary='Delete job annotations', responses={ '204': OpenApiResponse(description='The annotation has been deleted'), }) @@ -1872,7 +1883,7 @@ def dataset_export(self, request, pk): callback=dm.views.export_job_as_dataset ) - @extend_schema(summary='Method returns data for a specific job', + @extend_schema(summary='Get data of a job', parameters=[ OpenApiParameter('type', description='Specifies the type of the requested data', location=OpenApiParameter.QUERY, required=False, type=OpenApiTypes.STR, @@ -1901,11 +1912,11 @@ def data(self, request, pk): db_job.segment.stop_frame, db_job.segment.task.data) - @extend_schema(summary='Method provides a meta information about media files which are related with the job', + @extend_schema(methods=['GET'], summary='Get metainformation for media files in a job', responses={ '200': DataMetaReadSerializer, }) - @extend_schema(methods=['PATCH'], summary='Method performs an update of data meta fields (deleted frames)', + @extend_schema(methods=['PATCH'], summary='Update metainformation for media files in a job', request=DataMetaWriteSerializer, responses={ '200': DataMetaReadSerializer, @@ -1983,7 +1994,7 @@ def metadata(self, request, pk): serializer = DataMetaReadSerializer(db_data) return Response(serializer.data) - @extend_schema(summary='Method returns a preview image for the job', + @extend_schema(summary='Get a preview image for a job', responses={ '200': OpenApiResponse(description='Job image preview'), }) @@ -2005,30 +2016,30 @@ def preview(self, request, pk): @extend_schema(tags=['issues']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of an issue', + summary='Get issue details', responses={ '200': IssueReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of issues', + summary='List issues', responses={ '200': IssueReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in an issue', + summary='Update an issue', request=IssueWriteSerializer(partial=True), responses={ '200': IssueReadSerializer, # check IssueWriteSerializer.to_representation }), create=extend_schema( - summary='Method creates an issue', + summary='Create an issue', request=IssueWriteSerializer, parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ '201': IssueReadSerializer, # check IssueWriteSerializer.to_representation 
}), destroy=extend_schema( - summary='Method deletes an issue', + summary='Delete an issue', responses={ '204': OpenApiResponse(description='The issue has been deleted'), }) @@ -2076,30 +2087,30 @@ def perform_create(self, serializer, **kwargs): @extend_schema(tags=['comments']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of a comment', + summary='Get comment details', responses={ '200': CommentReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of comments', + summary='List comments', responses={ '200': CommentReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a comment', + summary='Update a comment', request=CommentWriteSerializer(partial=True), responses={ '200': CommentReadSerializer, # check CommentWriteSerializer.to_representation }), create=extend_schema( - summary='Method creates a comment', + summary='Create a comment', request=CommentWriteSerializer, parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ '201': CommentReadSerializer, # check CommentWriteSerializer.to_representation }), destroy=extend_schema( - summary='Method deletes a comment', + summary='Delete a comment', responses={ '204': OpenApiResponse(description='The comment has been deleted'), }) @@ -2147,12 +2158,12 @@ def perform_create(self, serializer, **kwargs): @extend_schema(tags=['labels']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of a label', + summary='Get label details', responses={ '200': LabelSerializer, }), list=extend_schema( - summary='Method returns a paginated list of labels', + summary='List labels', parameters=[ # These filters are implemented differently from others OpenApiParameter('job_id', type=OpenApiTypes.INT, @@ -2167,15 +2178,15 @@ def perform_create(self, serializer, **kwargs): '200': LabelSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a label' - 'To modify a sublabel, please use the PATCH method of the parent label', + summary='Update a label', + description='To modify a sublabel, please use the PATCH method of the parent label.', request=LabelSerializer(partial=True), responses={ '200': LabelSerializer, }), destroy=extend_schema( - summary='Method deletes a label. 
' - 'To delete a sublabel, please use the PATCH method of the parent label', + summary='Delete a label', + description='To delete a sublabel, please use the PATCH method of the parent label.', responses={ '204': OpenApiResponse(description='The label has been deleted'), }) @@ -2294,7 +2305,7 @@ def perform_destroy(self, instance: models.Label): @extend_schema(tags=['users']) @extend_schema_view( list=extend_schema( - summary='Method returns a paginated list of users', + summary='List users', responses={ '200': PolymorphicProxySerializer( component_name='MetaUser', @@ -2307,7 +2318,7 @@ def perform_destroy(self, instance: models.Label): ), }), retrieve=extend_schema( - summary='Method provides information of a specific user', + summary='Get user details', responses={ '200': PolymorphicProxySerializer( component_name='MetaUser', @@ -2319,7 +2330,7 @@ def perform_destroy(self, instance: models.Label): ), }), partial_update=extend_schema( - summary='Method updates chosen fields of a user', + summary='Update a user', responses={ '200': PolymorphicProxySerializer( component_name='MetaUser', @@ -2331,7 +2342,7 @@ def perform_destroy(self, instance: models.Label): ), }), destroy=extend_schema( - summary='Method deletes a specific user from the server', + summary='Delete a user', responses={ '204': OpenApiResponse(description='The user has been deleted'), }) @@ -2372,7 +2383,7 @@ def get_serializer_class(self): else: return BasicUserSerializer - @extend_schema(summary='Method returns an instance of a user who is currently authorized', + @extend_schema(summary='Get details of the current user', responses={ '200': PolymorphicProxySerializer(component_name='MetaUser', serializers=[ @@ -2391,28 +2402,28 @@ def self(self, request): @extend_schema(tags=['cloudstorages']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of a specific cloud storage', + summary='Get cloud storage details', responses={ '200': CloudStorageReadSerializer, }), list=extend_schema( - summary='Returns a paginated list of storages', + summary='List cloud storages', responses={ '200': CloudStorageReadSerializer(many=True), }), destroy=extend_schema( - summary='Method deletes a specific cloud storage', + summary='Delete a cloud storage', responses={ '204': OpenApiResponse(description='The cloud storage has been removed'), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a cloud storage instance', + summary='Update a cloud storage', request=CloudStorageWriteSerializer(partial=True), responses={ '200': CloudStorageReadSerializer, # check CloudStorageWriteSerializer.to_representation }), create=extend_schema( - summary='Method creates a cloud storage with a specified characteristics', + summary='Create a cloud storage', request=CloudStorageWriteSerializer, parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ @@ -2479,7 +2490,7 @@ def create(self, request, *args, **kwargs): response = HttpResponseBadRequest(str(ex)) return response - @extend_schema(summary='Method returns the content of the cloud storage', + @extend_schema(summary='Get cloud storage content', parameters=[ OpenApiParameter('manifest_path', description='Path to the manifest file in a cloud storage', location=OpenApiParameter.QUERY, type=OpenApiTypes.STR), @@ -2554,7 +2565,7 @@ def content_v2(self, request, pk): return Response("An internal error has occurred", status=status.HTTP_500_INTERNAL_SERVER_ERROR) - @extend_schema(summary='Method returns a preview image from a cloud storage', + 
@extend_schema(summary='Get a preview image for a cloud storage', responses={ '200': OpenApiResponse(description='Cloud Storage preview'), '400': OpenApiResponse(description='Failed to get cloud storage preview'), @@ -2590,7 +2601,7 @@ def preview(self, request, pk): return Response("An internal error has occurred", status=status.HTTP_500_INTERNAL_SERVER_ERROR) - @extend_schema(summary='Method returns a cloud storage status', + @extend_schema(summary='Get the status of a cloud storage', responses={ '200': OpenApiResponse(response=OpenApiTypes.STR, description='Cloud Storage status (AVAILABLE | NOT_FOUND | FORBIDDEN)'), }) @@ -2609,7 +2620,7 @@ def status(self, request, pk): msg = str(ex) return HttpResponseBadRequest(msg) - @extend_schema(summary='Method returns allowed actions for the cloud storage', + @extend_schema(summary='Get allowed actions for a cloud storage', responses={ '200': OpenApiResponse(response=OpenApiTypes.STR, description='Cloud Storage actions (GET | PUT | DELETE)'), }) @@ -2634,7 +2645,7 @@ def actions(self, request, pk): @extend_schema(tags=['assets']) @extend_schema_view( create=extend_schema( - summary='Method saves new asset on the server and attaches it to a corresponding guide', + summary='Create an asset', request={ 'multipart/form-data': { 'type': 'object', @@ -2650,12 +2661,12 @@ def actions(self, request, pk): '201': AssetReadSerializer, }), retrieve=extend_schema( - summary='Method returns an asset file', + summary='Get an asset', responses={ '200': OpenApiResponse(description='Asset file') }), destroy=extend_schema( - summary='Method deletes a specific asset from the server', + summary='Delete an asset', responses={ '204': OpenApiResponse(description='The asset has been deleted'), }), @@ -2741,23 +2752,25 @@ def perform_destroy(self, instance): @extend_schema(tags=['guides']) @extend_schema_view( create=extend_schema( - summary='Method creates a new annotation guide binded to a project or to a task', + summary='Create an annotation guide', + description='The new guide will be bound either to a project or a task, depending on parameters.', request=AnnotationGuideWriteSerializer, responses={ '201': AnnotationGuideReadSerializer, }), retrieve=extend_schema( - summary='Method returns details of a specific annotation guide', + summary='Get annotation guide details', responses={ '200': AnnotationGuideReadSerializer, }), destroy=extend_schema( - summary='Method deletes a specific annotation guide and all attached assets', + summary='Delete an annotation guide', + description='This also deletes all assets attached to the guide.', responses={ '204': OpenApiResponse(description='The annotation guide has been deleted'), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in an annotation guide', + summary='Update an annotation guide', request=AnnotationGuideWriteSerializer(partial=True), responses={ '200': AnnotationGuideReadSerializer, # check TaskWriteSerializer.to_representation diff --git a/cvat/apps/events/views.py b/cvat/apps/events/views.py index f208efb9e81..1281592d9e7 100644 --- a/cvat/apps/events/views.py +++ b/cvat/apps/events/views.py @@ -19,7 +19,7 @@ class EventsViewSet(viewsets.ViewSet): serializer_class = None - @extend_schema(summary='Method saves logs from a client on the server', + @extend_schema(summary='Log client events', methods=['POST'], description='Sends logs to the Clickhouse if it is connected', parameters=ORGANIZATION_OPEN_API_PARAMETERS, @@ -37,9 +37,9 @@ def create(self, request): return 
Response(serializer.data, status=status.HTTP_201_CREATED) - @extend_schema(summary='Method returns csv log file ', + @extend_schema(summary='Get an event log', methods=['GET'], - description='Receive logs from the server', + description='The log is returned in the CSV format.', parameters=[ OpenApiParameter('org_id', location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, required=False, description="Filter events by organization ID"), diff --git a/cvat/apps/organizations/views.py b/cvat/apps/organizations/views.py index feceea06607..0f45997ef2d 100644 --- a/cvat/apps/organizations/views.py +++ b/cvat/apps/organizations/views.py @@ -31,29 +31,29 @@ @extend_schema(tags=['organizations']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of an organization', + summary='Get organization details', responses={ '200': OrganizationReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of organizations', + summary='List organizations', responses={ '200': OrganizationReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in an organization', + summary='Update an organization', request=OrganizationWriteSerializer(partial=True), responses={ '200': OrganizationReadSerializer, # check OrganizationWriteSerializer.to_representation }), create=extend_schema( - summary='Method creates an organization', + summary='Create an organization', request=OrganizationWriteSerializer, responses={ '201': OrganizationReadSerializer, # check OrganizationWriteSerializer.to_representation }), destroy=extend_schema( - summary='Method deletes an organization', + summary='Delete an organization', responses={ '204': OpenApiResponse(description='The organization has been deleted'), }) @@ -100,23 +100,23 @@ class Meta: @extend_schema(tags=['memberships']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of a membership', + summary='Get membership details', responses={ '200': MembershipReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of memberships', + summary='List memberships', responses={ '200': MembershipReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in a membership', + summary='Update a membership', request=MembershipWriteSerializer(partial=True), responses={ '200': MembershipReadSerializer, # check MembershipWriteSerializer.to_representation }), destroy=extend_schema( - summary='Method deletes a membership', + summary='Delete a membership', responses={ '204': OpenApiResponse(description='The membership has been deleted'), }) @@ -151,37 +151,37 @@ def get_queryset(self): @extend_schema(tags=['invitations']) @extend_schema_view( retrieve=extend_schema( - summary='Method returns details of an invitation', + summary='Get invitation details', responses={ '200': InvitationReadSerializer, }), list=extend_schema( - summary='Method returns a paginated list of invitations', + summary='List invitations', responses={ '200': InvitationReadSerializer(many=True), }), partial_update=extend_schema( - summary='Methods does a partial update of chosen fields in an invitation', + summary='Update an invitation', request=InvitationWriteSerializer(partial=True), responses={ '200': InvitationReadSerializer, # check InvitationWriteSerializer.to_representation }), create=extend_schema( - summary='Method creates an invitation', + summary='Create an invitation', request=InvitationWriteSerializer, 
parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ '201': InvitationReadSerializer, # check InvitationWriteSerializer.to_representation }), destroy=extend_schema( - summary='Method deletes an invitation', + summary='Delete an invitation', responses={ '204': OpenApiResponse(description='The invitation has been deleted'), }), accept=extend_schema( operation_id='invitations_accept', request=None, - summary='Method registers user and accepts invitation to organization', + summary='Accept an invitation', responses={ '200': OpenApiResponse(response=AcceptInvitationReadSerializer, description='The invitation is accepted'), '400': OpenApiResponse(description='The invitation is expired or already accepted'), @@ -189,13 +189,13 @@ def get_queryset(self): decline=extend_schema( operation_id='invitations_decline', request=None, - summary='Method declines the invitation to organization', + summary='Decline an invitation', responses={ '204': OpenApiResponse(description='The invitation has been declined'), }), resend=extend_schema( operation_id='invitations_resend', - summary='Method resends the invitation to organization', + summary='Resend an invitation', request=None, responses={ '204': OpenApiResponse(description='Invitation has been sent'), diff --git a/cvat/apps/quality_control/quality_reports.py b/cvat/apps/quality_control/quality_reports.py index 562173f1441..f497ddf3feb 100644 --- a/cvat/apps/quality_control/quality_reports.py +++ b/cvat/apps/quality_control/quality_reports.py @@ -50,6 +50,7 @@ AnnotationConflictType, AnnotationType, ) +from cvat.utils.background_jobs import schedule_job_with_throttling class _Serializable: @@ -2065,18 +2066,12 @@ def _get_scheduler(self): def _get_queue(self): return django_rq.get_queue(settings.CVAT_QUEUES.QUALITY_REPORTS.value) - def _make_queue_job_prefix(self, task: Task) -> str: - return f"{self._QUEUE_JOB_PREFIX}{task.id}-" + def _make_queue_job_id_base(self, task: Task) -> str: + return f"{self._QUEUE_JOB_PREFIX}{task.id}" def _make_custom_quality_check_job_id(self) -> str: return uuid4().hex - def _make_initial_queue_job_id(self, task: Task) -> str: - return f"{self._make_queue_job_prefix(task)}initial" - - def _make_regular_queue_job_id(self, task: Task, start_time: timezone.datetime) -> str: - return f"{self._make_queue_job_prefix(task)}{start_time.timestamp()}" - @classmethod def _get_last_report_time(cls, task: Task) -> Optional[timezone.datetime]: report = models.QualityReport.objects.filter(task=task).order_by("-created_date").first() @@ -2084,41 +2079,6 @@ def _get_last_report_time(cls, task: Task) -> Optional[timezone.datetime]: return report.created_date return None - def _find_next_job_id( - self, existing_job_ids: Sequence[str], task: Task, *, now: timezone.datetime - ) -> str: - job_id_prefix = self._make_queue_job_prefix(task) - - def _get_timestamp(job_id: str) -> timezone.datetime: - job_timestamp = job_id.split(job_id_prefix, maxsplit=1)[-1] - if job_timestamp == "initial": - return timezone.datetime.min.replace(tzinfo=timezone.utc) - else: - return timezone.datetime.fromtimestamp(float(job_timestamp), tz=timezone.utc) - - max_job_id = max( - (j for j in existing_job_ids if j.startswith(job_id_prefix)), - key=_get_timestamp, - default=None, - ) - max_timestamp = _get_timestamp(max_job_id) if max_job_id else None - - last_update_time = self._get_last_report_time(task) - if last_update_time is None: - # Report has never been computed, is queued, or is being computed - queue_job_id = self._make_initial_queue_job_id(task) - elif 
max_timestamp is not None and now < max_timestamp: - # Reuse the existing next job - queue_job_id = max_job_id - else: - # Add an updating job in the queue in the next time frame - delay = self._get_quality_check_job_delay() - intervals = max(1, 1 + (now - last_update_time) // delay) - next_update_time = last_update_time + delay * intervals - queue_job_id = self._make_regular_queue_job_id(task, next_update_time) - - return queue_job_id - class QualityReportsNotAvailable(Exception): pass @@ -2148,11 +2108,6 @@ def schedule_quality_autoupdate_job(self, task: Task): This function schedules a quality report autoupdate job """ - # The algorithm is lock-free. It should keep the following properties: - # - job names are stable between potential writers - # - if multiple simultaneous writes can happen, the objects written must be the same - # - once a job is created, it can only be updated by the scheduler and the handling worker - if not self._should_update(task): return @@ -2160,17 +2115,13 @@ def schedule_quality_autoupdate_job(self, task: Task): delay = self._get_quality_check_job_delay() next_job_time = now.utcnow() + delay - scheduler = self._get_scheduler() - existing_job_ids = set(j.id for j in scheduler.get_jobs(until=next_job_time)) - - queue_job_id = self._find_next_job_id(existing_job_ids, task, now=now) - if queue_job_id not in existing_job_ids: - scheduler.enqueue_at( - next_job_time, - self._check_task_quality, - task_id=task.id, - job_id=queue_job_id, - ) + schedule_job_with_throttling( + settings.CVAT_QUEUES.QUALITY_REPORTS.value, + self._make_queue_job_id_base(task), + next_job_time, + self._check_task_quality, + task_id=task.id, + ) def schedule_quality_check_job(self, task: Task, *, user_id: int) -> str: """ diff --git a/cvat/apps/quality_control/views.py b/cvat/apps/quality_control/views.py index b16b3d12f41..030818e17fc 100644 --- a/cvat/apps/quality_control/views.py +++ b/cvat/apps/quality_control/views.py @@ -45,7 +45,7 @@ @extend_schema(tags=["quality"]) @extend_schema_view( list=extend_schema( - summary="Method returns a paginated list of annotation conflicts", + summary="List annotation conflicts in a quality report", parameters=[ # These filters are implemented differently from others OpenApiParameter( @@ -126,13 +126,13 @@ def get_queryset(self): @extend_schema_view( retrieve=extend_schema( operation_id="quality_retrieve_report", # the default produces the plural - summary="Method returns details of a quality report", + summary="Get quality report details", responses={ "200": QualityReportSerializer, }, ), list=extend_schema( - summary="Method returns a paginated list of quality reports", + summary="List quality reports", parameters=[ # These filters are implemented differently from others OpenApiParameter( @@ -221,7 +221,7 @@ def get_queryset(self): @extend_schema( operation_id="quality_create_report", - summary="Creates a quality report asynchronously and allows to check request status", + summary="Create a quality report", parameters=[ OpenApiParameter( CREATE_REPORT_RQ_ID_PARAMETER, @@ -320,7 +320,7 @@ def create(self, request, *args, **kwargs): @extend_schema( operation_id="quality_retrieve_report_data", - summary="Retrieve full contents of the report in JSON format", + summary="Get quality report contents", responses={"200": OpenApiTypes.OBJECT}, ) @action(detail=True, methods=["GET"], url_path="data", serializer_class=None) @@ -333,13 +333,13 @@ def data(self, request, pk): @extend_schema(tags=["quality"]) @extend_schema_view( list=extend_schema( - 
summary="Method returns a paginated list of quality settings instances", + summary="List quality settings instances", responses={ "200": QualitySettingsSerializer(many=True), }, ), retrieve=extend_schema( - summary="Method returns details of the quality settings instance", + summary="Get quality settings instance details", parameters=[ OpenApiParameter( "id", @@ -353,7 +353,7 @@ def data(self, request, pk): }, ), partial_update=extend_schema( - summary="Methods does a partial update of chosen fields in the quality settings instance", + summary="Update a quality settings instance", parameters=[ OpenApiParameter( "id", diff --git a/cvat/apps/webhooks/views.py b/cvat/apps/webhooks/views.py index afc725fa83b..5f55312787d 100644 --- a/cvat/apps/webhooks/views.py +++ b/cvat/apps/webhooks/views.py @@ -24,22 +24,22 @@ @extend_schema(tags=["webhooks"]) @extend_schema_view( retrieve=extend_schema( - summary="Method returns details of a webhook", + summary="Get webhook details", responses={"200": WebhookReadSerializer}, ), list=extend_schema( - summary="Method returns a paginated list of webhook according to query parameters", + summary="List webhooks", responses={"200": WebhookReadSerializer(many=True)}, ), update=extend_schema( - summary="Method updates a webhook by id", + summary="Replace a webhook", request=WebhookWriteSerializer, responses={ "200": WebhookReadSerializer }, # check WebhookWriteSerializer.to_representation ), partial_update=extend_schema( - summary="Methods does a partial update of chosen fields in a webhook", + summary="Update a webhook", request=WebhookWriteSerializer, responses={ "200": WebhookReadSerializer @@ -47,14 +47,14 @@ ), create=extend_schema( request=WebhookWriteSerializer, - summary="Method creates a webhook", + summary="Create a webhook", parameters=ORGANIZATION_OPEN_API_PARAMETERS, responses={ "201": WebhookReadSerializer }, # check WebhookWriteSerializer.to_representation ), destroy=extend_schema( - summary="Method deletes a webhook", + summary="Delete a webhook", responses={"204": OpenApiResponse(description="The webhook has been deleted")}, ), ) @@ -97,7 +97,7 @@ def perform_create(self, serializer): ) @extend_schema( - summary="Method return a list of available webhook events", + summary="List available webhook events", parameters=[ OpenApiParameter( "type", @@ -128,7 +128,7 @@ def events(self, request): return Response(EventsSerializer().to_representation(events)) @extend_schema( - summary="Method return a list of deliveries for a specific webhook", + summary="List deliveries for a webhook", responses=WebhookDeliveryReadSerializer( many=True ), # Duplicate to still get 'list' op. 
name @@ -144,7 +144,7 @@ def deliveries(self, request, pk): ) # from @action @extend_schema( - summary="Method return a specific delivery for a specific webhook", + summary="Get details of a webhook delivery", responses={"200": WebhookDeliveryReadSerializer}, ) @action( @@ -162,7 +162,7 @@ def retrieve_delivery(self, request, pk, delivery_id): return Response(serializer.data) @extend_schema( - summary="Method redeliver a specific webhook delivery", + summary="Redeliver a webhook delivery", request=None, responses={200: None}, ) @@ -178,7 +178,7 @@ def redelivery(self, request, pk, delivery_id): return Response({}, status=status.HTTP_200_OK) @extend_schema( - summary="Method send ping webhook", + summary="Send a ping webhook", request=None, responses={"200": WebhookDeliveryReadSerializer}, ) diff --git a/cvat/requirements/base.txt b/cvat/requirements/base.txt index df10ef1b668..ed2894b1d67 100644 --- a/cvat/requirements/base.txt +++ b/cvat/requirements/base.txt @@ -6,7 +6,7 @@ # pip-compile-multi # -r ../../utils/dataset_manifest/requirements.txt -asgiref==3.7.2 +asgiref==3.8.1 # via django async-timeout==4.0.3 # via redis @@ -66,10 +66,8 @@ deprecated==1.2.14 # via limits dj-pagination==2.5.0 # via -r cvat/requirements/base.in -dj-rest-auth[with-social]==2.2.7 - # via - # -r cvat/requirements/base.in - # dj-rest-auth +dj-rest-auth[with_social]==2.2.7 + # via -r cvat/requirements/base.in django==4.2.11 # via # -r cvat/requirements/base.in @@ -118,17 +116,17 @@ easyprocess==1.1 # via pyunpack entrypoint2==1.1 # via pyunpack -fonttools==4.49.0 +fonttools==4.50.0 # via matplotlib freezegun==1.4.0 # via rq-scheduler furl==2.1.0 # via -r cvat/requirements/base.in -google-api-core==2.17.1 +google-api-core==2.18.0 # via # google-cloud-core # google-cloud-storage -google-auth==2.28.1 +google-auth==2.29.0 # via # google-api-core # google-cloud-core @@ -141,15 +139,15 @@ google-crc32c==1.5.0 # via google-resumable-media google-resumable-media==2.7.0 # via google-cloud-storage -googleapis-common-protos==1.62.0 +googleapis-common-protos==1.63.0 # via google-api-core h5py==3.10.0 # via datumaro idna==3.6 # via requests -importlib-metadata==7.0.1 +importlib-metadata==7.1.0 # via clickhouse-connect -importlib-resources==6.1.2 +importlib-resources==6.4.0 # via limits inflection==0.5.1 # via drf-spectacular @@ -167,7 +165,7 @@ jsonschema==4.17.3 # via drf-spectacular kiwisolver==1.4.5 # via matplotlib -limits==3.9.0 +limits==3.10.1 # via python-logstash-async lxml==5.1.0 # via datumaro @@ -189,9 +187,9 @@ oauthlib==3.2.2 # via requests-oauthlib orderedmultidict==1.0.1 # via furl -orjson==3.9.15 +orjson==3.10.0 # via datumaro -packaging==23.2 +packaging==24.0 # via # limits # matplotlib @@ -203,21 +201,24 @@ patool==1.12 # via -r cvat/requirements/base.in pdf2image==1.14.0 # via -r cvat/requirements/base.in +proto-plus==1.23.0 + # via google-api-core protobuf==4.25.3 # via # google-api-core # googleapis-common-protos + # proto-plus # tensorboardx psutil==5.9.4 # via -r cvat/requirements/base.in psycopg2-binary==2.9.5 # via -r cvat/requirements/base.in -pyasn1==0.5.1 +pyasn1==0.6.0 # via # pyasn1-modules # python-ldap # rsa -pyasn1-modules==0.3.0 +pyasn1-modules==0.4.0 # via # google-auth # python-ldap @@ -226,12 +227,10 @@ pycocotools==2.0.7 pycparser==2.21 # via cffi pyjwt[crypto]==2.8.0 - # via - # django-allauth - # pyjwt + # via django-allauth pylogbeat==2.0.1 # via python-logstash-async -pyparsing==3.1.1 +pyparsing==3.1.2 # via matplotlib pyrsistent==0.20.0 # via jsonschema @@ -280,7 +279,7 @@ 
requests==2.31.0 # msrest # python-logstash-async # requests-oauthlib -requests-oauthlib==1.3.1 +requests-oauthlib==2.0.0 # via # django-allauth # msrest @@ -337,7 +336,7 @@ urllib3==1.26.18 # requests wrapt==1.16.0 # via deprecated -zipp==3.17.0 +zipp==3.18.1 # via importlib-metadata zstandard==0.22.0 # via clickhouse-connect diff --git a/cvat/requirements/development.txt b/cvat/requirements/development.txt index 3625cd7745b..2c9f1bcc0a3 100644 --- a/cvat/requirements/development.txt +++ b/cvat/requirements/development.txt @@ -8,9 +8,9 @@ -r base.txt astroid==2.11.7 # via pylint -autopep8==2.0.4 +autopep8==2.1.0 # via django-silk -black==24.2.0 +black==24.3.0 # via -r cvat/requirements/development.in dill==0.3.8 # via pylint @@ -62,5 +62,5 @@ tornado==6.4 # via snakeviz # The following packages are considered to be unsafe in a requirements file: -setuptools==69.1.1 +setuptools==69.2.0 # via astroid diff --git a/cvat/requirements/production.txt b/cvat/requirements/production.txt index 0bea0b968b1..1f4b6e44f27 100644 --- a/cvat/requirements/production.txt +++ b/cvat/requirements/production.txt @@ -21,9 +21,7 @@ python-dotenv==1.0.1 sniffio==1.3.1 # via anyio uvicorn[standard]==0.22.0 - # via - # -r cvat/requirements/production.in - # uvicorn + # via -r cvat/requirements/production.in uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 diff --git a/cvat/schema.yml b/cvat/schema.yml index b66048b7289..9ff8d3a308f 100644 --- a/cvat/schema.yml +++ b/cvat/schema.yml @@ -16,7 +16,7 @@ paths: get: operationId: analytics_get_reports description: Receive analytics report - summary: Method returns analytics report + summary: Get an analytics report parameters: - in: query name: end_date @@ -64,8 +64,7 @@ paths: description: Not found post: operationId: analytics_create_report - summary: Creates a analytics report asynchronously and allows to check request - status + summary: Create an analytics report parameters: - in: query name: rq_id @@ -105,8 +104,7 @@ paths: /api/assets: post: operationId: assets_create - summary: Method saves new asset on the server and attaches it to a corresponding - guide + summary: Create an asset tags: - assets requestBody: @@ -134,7 +132,7 @@ paths: /api/assets/{uuid}: get: operationId: assets_retrieve - summary: Method returns an asset file + summary: Get an asset parameters: - in: path name: uuid @@ -156,7 +154,7 @@ paths: description: Asset file delete: operationId: assets_destroy - summary: Method deletes a specific asset from the server + summary: Delete an asset parameters: - in: path name: uuid @@ -391,7 +389,7 @@ paths: /api/cloudstorages: get: operationId: cloudstorages_list - summary: Returns a paginated list of storages + summary: List cloud storages parameters: - name: X-Organization in: header @@ -504,7 +502,7 @@ paths: description: '' post: operationId: cloudstorages_create - summary: Method creates a cloud storage with a specified characteristics + summary: Create a cloud storage parameters: - in: header name: X-Organization @@ -592,7 +590,7 @@ paths: /api/cloudstorages/{id}: get: operationId: cloudstorages_retrieve - summary: Method returns details of a specific cloud storage + summary: Get cloud storage details parameters: - in: path name: id @@ -617,7 +615,7 @@ paths: description: '' patch: operationId: cloudstorages_partial_update - summary: Methods does a partial update of chosen fields in a cloud storage instance + summary: Update a cloud storage parameters: - in: path name: id @@ -694,7 +692,7 @@ paths: description: '' delete: operationId: 
cloudstorages_destroy - summary: Method deletes a specific cloud storage + summary: Delete a cloud storage parameters: - in: path name: id @@ -718,7 +716,7 @@ paths: operationId: cloudstorages_retrieve_actions description: Method return allowed actions for cloud storage. It's required for reading/writing - summary: Method returns allowed actions for the cloud storage + summary: Get allowed actions for a cloud storage parameters: - in: path name: id @@ -744,7 +742,7 @@ paths: /api/cloudstorages/{id}/content-v2: get: operationId: cloudstorages_retrieve_content_v2 - summary: Method returns the content of the cloud storage + summary: Get cloud storage content parameters: - in: path name: id @@ -789,7 +787,7 @@ paths: /api/cloudstorages/{id}/preview: get: operationId: cloudstorages_retrieve_preview - summary: Method returns a preview image from a cloud storage + summary: Get a preview image for a cloud storage parameters: - in: path name: id @@ -815,7 +813,7 @@ paths: /api/cloudstorages/{id}/status: get: operationId: cloudstorages_retrieve_status - summary: Method returns a cloud storage status + summary: Get the status of a cloud storage parameters: - in: path name: id @@ -841,7 +839,7 @@ paths: /api/comments: get: operationId: comments_list - summary: Method returns a paginated list of comments + summary: List comments parameters: - name: X-Organization in: header @@ -936,7 +934,7 @@ paths: description: '' post: operationId: comments_create - summary: Method creates a comment + summary: Create a comment parameters: - in: header name: X-Organization @@ -977,7 +975,7 @@ paths: /api/comments/{id}: get: operationId: comments_retrieve - summary: Method returns details of a comment + summary: Get comment details parameters: - in: path name: id @@ -1002,7 +1000,7 @@ paths: description: '' patch: operationId: comments_partial_update - summary: Methods does a partial update of chosen fields in a comment + summary: Update a comment parameters: - in: path name: id @@ -1032,7 +1030,7 @@ paths: description: '' delete: operationId: comments_destroy - summary: Method deletes a comment + summary: Delete a comment parameters: - in: path name: id @@ -1054,8 +1052,8 @@ paths: /api/events: get: operationId: events_list - description: Receive logs from the server - summary: 'Method returns csv log file ' + description: The log is returned in the CSV format. + summary: Get an event log parameters: - in: query name: action @@ -1132,7 +1130,7 @@ paths: post: operationId: events_create description: Sends logs to the Clickhouse if it is connected - summary: Method saves logs from a client on the server + summary: Log client events parameters: - in: header name: X-Organization @@ -1173,7 +1171,9 @@ paths: /api/guides: post: operationId: guides_create - summary: Method creates a new annotation guide binded to a project or to a task + description: The new guide will be bound either to a project or a task, depending + on parameters. 
+ summary: Create an annotation guide tags: - guides requestBody: @@ -1197,7 +1197,7 @@ paths: /api/guides/{id}: get: operationId: guides_retrieve - summary: Method returns details of a specific annotation guide + summary: Get annotation guide details parameters: - in: path name: id @@ -1222,7 +1222,7 @@ paths: description: '' patch: operationId: guides_partial_update - summary: Methods does a partial update of chosen fields in an annotation guide + summary: Update an annotation guide parameters: - in: path name: id @@ -1252,7 +1252,8 @@ paths: description: '' delete: operationId: guides_destroy - summary: Method deletes a specific annotation guide and all attached assets + description: This also deletes all assets attached to the guide. + summary: Delete an annotation guide parameters: - in: path name: id @@ -1274,7 +1275,7 @@ paths: /api/invitations: get: operationId: invitations_list - summary: Method returns a paginated list of invitations + summary: List invitations parameters: - name: X-Organization in: header @@ -1354,7 +1355,7 @@ paths: description: '' post: operationId: invitations_create - summary: Method creates an invitation + summary: Create an invitation parameters: - in: header name: X-Organization @@ -1395,7 +1396,7 @@ paths: /api/invitations/{key}: get: operationId: invitations_retrieve - summary: Method returns details of an invitation + summary: Get invitation details parameters: - in: path name: key @@ -1420,7 +1421,7 @@ paths: description: '' patch: operationId: invitations_partial_update - summary: Methods does a partial update of chosen fields in an invitation + summary: Update an invitation parameters: - in: path name: key @@ -1450,7 +1451,7 @@ paths: description: '' delete: operationId: invitations_destroy - summary: Method deletes an invitation + summary: Delete an invitation parameters: - in: path name: key @@ -1472,7 +1473,7 @@ paths: /api/invitations/{key}/accept: post: operationId: invitations_accept - summary: Method registers user and accepts invitation to organization + summary: Accept an invitation parameters: - in: path name: key @@ -1500,7 +1501,7 @@ paths: /api/invitations/{key}/decline: post: operationId: invitations_decline - summary: Method declines the invitation to organization + summary: Decline an invitation parameters: - in: path name: key @@ -1522,7 +1523,7 @@ paths: /api/invitations/{key}/resend: post: operationId: invitations_resend - summary: Method resends the invitation to organization + summary: Resend an invitation parameters: - in: path name: key @@ -1546,7 +1547,7 @@ paths: /api/issues: get: operationId: issues_list - summary: Method returns a paginated list of issues + summary: List issues parameters: - name: X-Organization in: header @@ -1652,7 +1653,7 @@ paths: description: '' post: operationId: issues_create - summary: Method creates an issue + summary: Create an issue parameters: - in: header name: X-Organization @@ -1693,7 +1694,7 @@ paths: /api/issues/{id}: get: operationId: issues_retrieve - summary: Method returns details of an issue + summary: Get issue details parameters: - in: path name: id @@ -1718,7 +1719,7 @@ paths: description: '' patch: operationId: issues_partial_update - summary: Methods does a partial update of chosen fields in an issue + summary: Update an issue parameters: - in: path name: id @@ -1748,7 +1749,7 @@ paths: description: '' delete: operationId: issues_destroy - summary: Method deletes an issue + summary: Delete an issue parameters: - in: path name: id @@ -1770,7 +1771,7 @@ paths: /api/jobs: get: 
operationId: jobs_list - summary: Method returns a paginated list of jobs + summary: List jobs parameters: - name: X-Organization in: header @@ -1907,7 +1908,7 @@ description: '' post: operationId: jobs_create - summary: Method creates a new job in the task + summary: Create a job tags: - jobs requestBody: @@ -1932,7 +1933,7 @@ /api/jobs/{id}: get: operationId: jobs_retrieve - summary: Method returns details of a job + summary: Get job details parameters: - in: path name: id @@ -1957,7 +1958,7 @@ description: '' patch: operationId: jobs_partial_update - summary: Methods does a partial update of chosen fields in a job + summary: Update a job parameters: - in: path name: id @@ -1988,9 +1989,11 @@ delete: operationId: jobs_destroy description: | + Related annotations will be deleted as well. + Please note, that not every job can be removed. Currently, it is only available for Ground Truth jobs. - summary: Method deletes a job and its related annotations + summary: Delete a job parameters: - in: path name: id @@ -2012,8 +2015,10 @@ /api/jobs/{id}/annotations/: get: operationId: jobs_retrieve_annotations - summary: Method returns annotations for a specific job as a JSON document. If - format is specified, a zip archive is returned. + description: | + If format is specified, a ZIP archive will be returned. Otherwise, + the annotations will be returned as a JSON document. + summary: Get job annotations parameters: - in: query name: action @@ -2086,11 +2091,10 @@ operationId: jobs_create_annotations description: |2 - The request POST /api/jobs/id/annotations will initiate file upload and will create - the rq job on the server in which the process of annotations uploading from file - will be carried out. Please, use the PUT /api/jobs/id/annotations endpoint for checking status of the process. - summary: Method allows to initialize the process of the job annotation upload - from a local file or a cloud storage + The request POST /api/jobs/id/annotations will initiate the import and will create + the rq job on the server in which the import will be carried out. + Please use the PUT /api/jobs/id/annotations endpoint to check the status of the process. + summary: Import annotations into a job parameters: - in: query name: cloud_storage_id @@ -2161,13 +2165,12 @@ operationId: jobs_update_annotations description: |2 - To check the status of the process of uploading a job annotations from a file: + To check the status of an import request: - After initiating the annotations upload, you will receive an rq_id parameter. + After initiating the annotation import, you will receive an rq_id parameter. Make sure to include this parameter as a query parameter in your subsequent - PUT /api/jobs/id/annotations requests to track the status of the annotations upload. - summary: Method performs an update of all annotations in a specific job or used - for uploading annotations from a file + PUT /api/jobs/id/annotations requests to track the status of the import.
+ summary: Replace job annotations / Get annotation import status parameters: - in: query name: cloud_storage_id @@ -2230,14 +2233,14 @@ paths: - basicAuth: [] responses: '201': - description: Uploading has finished + description: Import has finished '202': - description: Uploading has been started + description: Import is in progress '405': description: Format is not available patch: operationId: jobs_partial_update_annotations - summary: Method performs a partial update of annotations in a specific job + summary: Update job annotations parameters: - in: query name: action @@ -2275,7 +2278,7 @@ paths: description: Annotations successfully uploaded delete: operationId: jobs_destroy_annotations - summary: Method deletes all annotations for a specific job + summary: Delete job annotations parameters: - in: path name: id @@ -2297,7 +2300,7 @@ paths: /api/jobs/{id}/data: get: operationId: jobs_retrieve_data - summary: Method returns data for a specific job + summary: Get data of a job parameters: - in: path name: id @@ -2346,8 +2349,7 @@ paths: /api/jobs/{id}/data/meta: get: operationId: jobs_retrieve_data_meta - summary: Method provides a meta information about media files which are related - with the job + summary: Get metainformation for media files in a job parameters: - in: path name: id @@ -2372,8 +2374,7 @@ paths: description: '' patch: operationId: jobs_partial_update_data_meta - summary: Method provides a meta information about media files which are related - with the job + summary: Update metainformation for media files in a job parameters: - in: path name: id @@ -2478,7 +2479,7 @@ paths: /api/jobs/{id}/preview: get: operationId: jobs_retrieve_preview - summary: Method returns a preview image for the job + summary: Get a preview image for a job parameters: - in: path name: id @@ -2500,7 +2501,7 @@ paths: /api/labels: get: operationId: labels_list - summary: Method returns a paginated list of labels + summary: List labels parameters: - in: header name: X-Organization @@ -2627,7 +2628,7 @@ paths: /api/labels/{id}: get: operationId: labels_retrieve - summary: Method returns details of a label + summary: Get label details parameters: - in: path name: id @@ -2652,8 +2653,9 @@ paths: description: '' patch: operationId: labels_partial_update - summary: Methods does a partial update of chosen fields in a labelTo modify - a sublabel, please use the PATCH method of the parent label + description: To modify a sublabel, please use the PATCH method of the parent + label. + summary: Update a label parameters: - in: path name: id @@ -2683,8 +2685,9 @@ paths: description: '' delete: operationId: labels_destroy - summary: Method deletes a label. To delete a sublabel, please use the PATCH - method of the parent label + description: To delete a sublabel, please use the PATCH method of the parent + label. 
+ summary: Delete a label parameters: - in: path name: id @@ -2888,7 +2891,7 @@ paths: /api/memberships: get: operationId: memberships_list - summary: Method returns a paginated list of memberships + summary: List memberships parameters: - name: X-Organization in: header @@ -2979,7 +2982,7 @@ paths: /api/memberships/{id}: get: operationId: memberships_retrieve - summary: Method returns details of a membership + summary: Get membership details parameters: - in: path name: id @@ -3004,7 +3007,7 @@ paths: description: '' patch: operationId: memberships_partial_update - summary: Methods does a partial update of chosen fields in a membership + summary: Update a membership parameters: - in: path name: id @@ -3034,7 +3037,7 @@ paths: description: '' delete: operationId: memberships_destroy - summary: Method deletes a membership + summary: Delete a membership parameters: - in: path name: id @@ -3056,7 +3059,7 @@ paths: /api/organizations: get: operationId: organizations_list - summary: Method returns a paginated list of organizations + summary: List organizations parameters: - name: filter required: false @@ -3132,7 +3135,7 @@ paths: description: '' post: operationId: organizations_create - summary: Method creates an organization + summary: Create an organization tags: - organizations requestBody: @@ -3157,7 +3160,7 @@ paths: /api/organizations/{id}: get: operationId: organizations_retrieve - summary: Method returns details of an organization + summary: Get organization details parameters: - in: path name: id @@ -3182,7 +3185,7 @@ paths: description: '' patch: operationId: organizations_partial_update - summary: Methods does a partial update of chosen fields in an organization + summary: Update an organization parameters: - in: path name: id @@ -3212,7 +3215,7 @@ paths: description: '' delete: operationId: organizations_destroy - summary: Method deletes an organization + summary: Delete an organization parameters: - in: path name: id @@ -3234,7 +3237,7 @@ paths: /api/projects: get: operationId: projects_list - summary: Returns a paginated list of projects + summary: List projects parameters: - name: X-Organization in: header @@ -3334,7 +3337,7 @@ paths: description: '' post: operationId: projects_create - summary: Method creates a new project + summary: Create a project parameters: - in: header name: X-Organization @@ -3375,7 +3378,7 @@ paths: /api/projects/{id}: get: operationId: projects_retrieve - summary: Method returns details of a specific project + summary: Get project details parameters: - in: path name: id @@ -3400,7 +3403,7 @@ paths: description: '' patch: operationId: projects_partial_update - summary: Methods does a partial update of chosen fields in a project + summary: Update a project parameters: - in: path name: id @@ -3430,7 +3433,7 @@ paths: description: '' delete: operationId: projects_destroy - summary: Method deletes a specific project + summary: Delete a project parameters: - in: path name: id @@ -3452,7 +3455,7 @@ paths: /api/projects/{id}/annotations: get: operationId: projects_retrieve_annotations - summary: Method allows to download project annotations + summary: Get project annotations parameters: - in: query name: action @@ -3527,7 +3530,7 @@ paths: /api/projects/{id}/backup: get: operationId: projects_retrieve_backup - summary: Methods creates a backup copy of a project + summary: Back up a project parameters: - in: query name: action @@ -3593,7 +3596,7 @@ paths: Make sure to include this parameter as a query parameter in your subsequent GET /api/projects/id/dataset 
requests to track the status of the dataset import. Also you should specify action parameter: action=import_status. - summary: Export project as a dataset in a specific format + summary: Export a project as a dataset / Check dataset import status parameters: - in: query name: action @@ -3676,8 +3679,7 @@ paths: The request POST /api/projects/id/dataset will initiate file upload and will create the rq job on the server in which the process of dataset import from a file will be carried out. Please, use the GET /api/projects/id/dataset endpoint for checking status of the process. - summary: Import dataset in specific format as a project or check status of dataset - import process + summary: Import a dataset into a project parameters: - in: query name: cloud_storage_id @@ -3748,7 +3750,7 @@ paths: /api/projects/{id}/preview: get: operationId: projects_retrieve_preview - summary: Method returns a preview image for the project + summary: Get a preview image for a project parameters: - in: path name: id @@ -3784,7 +3786,7 @@ paths: Make sure to include this parameter as a query parameter in your subsequent requests to track the status of the project creation. Once the project has been successfully created, the server will return the id of the newly created project. - summary: Methods create a project from a backup + summary: Recreate a project from a backup parameters: - in: header name: X-Organization @@ -3853,7 +3855,7 @@ paths: /api/quality/conflicts: get: operationId: quality_list_conflicts - summary: Method returns a paginated list of annotation conflicts + summary: List annotation conflicts in a quality report parameters: - name: X-Organization in: header @@ -3965,7 +3967,7 @@ paths: /api/quality/reports: get: operationId: quality_list_reports - summary: Method returns a paginated list of quality reports + summary: List quality reports parameters: - name: X-Organization in: header @@ -4055,8 +4057,7 @@ paths: description: '' post: operationId: quality_create_report - summary: Creates a quality report asynchronously and allows to check request - status + summary: Create a quality report parameters: - in: query name: rq_id @@ -4100,7 +4101,7 @@ paths: /api/quality/reports/{id}: get: operationId: quality_retrieve_report - summary: Method returns details of a quality report + summary: Get quality report details parameters: - in: path name: id @@ -4126,7 +4127,7 @@ paths: /api/quality/reports/{id}/data: get: operationId: quality_retrieve_report_data - summary: Retrieve full contents of the report in JSON format + summary: Get quality report contents parameters: - in: path name: id @@ -4152,7 +4153,7 @@ paths: /api/quality/settings: get: operationId: quality_list_settings - summary: Method returns a paginated list of quality settings instances + summary: List quality settings instances parameters: - name: X-Organization in: header @@ -4227,7 +4228,7 @@ paths: /api/quality/settings/{id}: get: operationId: quality_retrieve_settings - summary: Method returns details of the quality settings instance + summary: Get quality settings instance details parameters: - in: path name: id @@ -4252,8 +4253,7 @@ paths: description: '' patch: operationId: quality_partial_update_settings - summary: Methods does a partial update of chosen fields in the quality settings - instance + summary: Update a quality settings instance parameters: - in: path name: id @@ -4427,7 +4427,7 @@ paths: /api/server/about: get: operationId: server_retrieve_about - summary: Method provides basic CVAT information + summary: Get basic 
CVAT information tags: - server security: - basicAuth: [] @@ -4446,7 +4446,7 @@ /api/server/annotation/formats: get: operationId: server_retrieve_annotation_formats - summary: Method provides the list of supported annotations formats + summary: Get supported annotation formats tags: - server security: @@ -4465,7 +4465,7 @@ /api/server/plugins: get: operationId: server_retrieve_plugins - summary: Method provides allowed plugins + summary: Get enabled plugins tags: - server security: @@ -4484,8 +4484,7 @@ /api/server/share: get: operationId: server_list_share - summary: Returns all files and folders that are on the server along specified - path + summary: List files/directories in the mounted share parameters: - in: query name: directory @@ -4517,7 +4516,7 @@ /api/tasks: get: operationId: tasks_list - summary: Returns a paginated list of tasks + summary: List tasks parameters: - name: X-Organization in: header @@ -4659,8 +4658,10 @@ description: '' post: operationId: tasks_create - summary: Method creates a new task in a database without any attached images - and videos + description: | + The new task will not have any attached images or videos. + To attach them, use the /api/tasks/<id>/data endpoint. + summary: Create a task parameters: - in: header name: X-Organization @@ -4701,7 +4702,7 @@ /api/tasks/{id}: get: operationId: tasks_retrieve - summary: Method returns details of a specific task + summary: Get task details parameters: - in: path name: id @@ -4726,7 +4727,7 @@ description: '' patch: operationId: tasks_partial_update - summary: Methods does a partial update of chosen fields in a task + summary: Update a task parameters: - in: path name: id @@ -4756,8 +4757,8 @@ description: '' delete: operationId: tasks_destroy - summary: Method deletes a specific task, all attached jobs, annotations, and - data + description: All attached jobs, annotations and data will be deleted as well. + summary: Delete a task parameters: - in: path name: id @@ -4779,7 +4780,7 @@ /api/tasks/{id}/annotations/: get: operationId: tasks_retrieve_annotations - summary: Method allows to download task annotations + summary: Get task annotations parameters: - in: query name: action @@ -4854,11 +4855,10 @@ operationId: tasks_create_annotations description: |2 - The request POST /api/tasks/id/annotations will initiate file upload and will create - the rq job on the server in which the process of annotations uploading from file - will be carried out. Please, use the PUT /api/tasks/id/annotations endpoint for checking status of the process. - summary: Method allows to initialize the process of upload task annotations - from a local or a cloud storage file + The request POST /api/tasks/id/annotations will initiate the import and will create + the rq job on the server in which the import will be carried out. + Please use the PUT /api/tasks/id/annotations endpoint to check the status of the process. + summary: Import annotations into a task parameters: - in: query name: cloud_storage_id @@ -4929,12 +4929,12 @@ operationId: tasks_update_annotations description: |2 - To check the status of the process of uploading a task annotations from a file: + To check the status of an import request: - After initiating the annotations upload, you will receive an rq_id parameter. + After initiating the annotation import, you will receive an rq_id parameter.
Make sure to include this parameter as a query parameter in your subsequent - PUT /api/tasks/id/annotations requests to track the status of the annotations upload. - summary: Method allows to upload task annotations or edit existing annotations + PUT /api/tasks/id/annotations requests to track the status of the import. + summary: Replace task annotations / Get annotation import status parameters: - in: query name: format @@ -4973,14 +4973,14 @@ paths: - basicAuth: [] responses: '201': - description: Uploading has finished + description: Import has finished '202': - description: Uploading has been started + description: Import is in progress '405': description: Format is not available patch: operationId: tasks_partial_update_annotations - summary: Method performs a partial update of annotations in a specific task + summary: Update task annotations parameters: - in: query name: action @@ -5022,7 +5022,7 @@ paths: description: '' delete: operationId: tasks_destroy_annotations - summary: Method deletes all annotations for a specific task + summary: Delete task annotations parameters: - in: path name: id @@ -5044,7 +5044,7 @@ paths: /api/tasks/{id}/backup: get: operationId: tasks_retrieve_backup - summary: Method backup a specified task + summary: Back up a task parameters: - in: query name: action @@ -5104,7 +5104,7 @@ paths: /api/tasks/{id}/data/: get: operationId: tasks_retrieve_data - summary: Method returns data for a specific task + summary: Get data of a task parameters: - in: path name: id @@ -5148,7 +5148,7 @@ paths: post: operationId: tasks_create_data description: | - Allows to upload data to a task. + Allows to upload data (images, video, etc.) to a task. Supports the TUS open file uploading protocol (https://tus.io/). Supports the following protocols: @@ -5197,7 +5197,9 @@ paths: After all data is sent, the operation status can be retrieved via the /status endpoint. - summary: Method permanently attaches data (images, video, etc.) to a task + + Once data is attached to a task, it cannot be detached or replaced. + summary: Attach data to a task parameters: - in: header name: Upload-Finish @@ -5245,8 +5247,7 @@ paths: /api/tasks/{id}/data/meta: get: operationId: tasks_retrieve_data_meta - summary: Method provides a meta information about media files which are related - with the task + summary: Get metainformation for media files in a task parameters: - in: path name: id @@ -5271,8 +5272,7 @@ paths: description: '' patch: operationId: tasks_partial_update_data_meta - summary: Method provides a meta information about media files which are related - with the task + summary: Update metainformation for media files in a task parameters: - in: path name: id @@ -5379,7 +5379,7 @@ paths: /api/tasks/{id}/preview: get: operationId: tasks_retrieve_preview - summary: Method returns a preview image for the task + summary: Get a preview image for a task parameters: - in: path name: id @@ -5403,8 +5403,7 @@ paths: /api/tasks/{id}/status: get: operationId: tasks_retrieve_status - summary: When task is being created the method returns information about a status - of the creation process + summary: Get the creation status of a task parameters: - in: path name: id @@ -5442,7 +5441,7 @@ paths: Make sure to include this parameter as a query parameter in your subsequent requests to track the status of the task creation. Once the task has been successfully created, the server will return the id of the newly created task. 
- summary: Method recreates a task from an attached task backup file + summary: Recreate a task from a backup parameters: - in: header name: X-Organization @@ -5511,7 +5510,7 @@ paths: /api/users: get: operationId: users_list - summary: Method returns a paginated list of users + summary: List users parameters: - name: X-Organization in: header @@ -5608,7 +5607,7 @@ paths: /api/users/{id}: get: operationId: users_retrieve - summary: Method provides information of a specific user + summary: Get user details parameters: - in: path name: id @@ -5633,7 +5632,7 @@ paths: description: '' patch: operationId: users_partial_update - summary: Method updates chosen fields of a user + summary: Update a user parameters: - in: path name: id @@ -5663,7 +5662,7 @@ paths: description: '' delete: operationId: users_destroy - summary: Method deletes a specific user from the server + summary: Delete a user parameters: - in: path name: id @@ -5686,7 +5685,7 @@ paths: get: operationId: users_retrieve_self description: Method returns an instance of a user who is currently authorized - summary: Method returns an instance of a user who is currently authorized + summary: Get details of the current user tags: - users security: @@ -5705,7 +5704,7 @@ paths: /api/webhooks: get: operationId: webhooks_list - summary: Method returns a paginated list of webhook according to query parameters + summary: List webhooks parameters: - name: X-Organization in: header @@ -5805,7 +5804,7 @@ paths: description: '' post: operationId: webhooks_create - summary: Method creates a webhook + summary: Create a webhook parameters: - in: header name: X-Organization @@ -5846,7 +5845,7 @@ paths: /api/webhooks/{id}: get: operationId: webhooks_retrieve - summary: Method returns details of a webhook + summary: Get webhook details parameters: - in: path name: id @@ -5871,7 +5870,7 @@ paths: description: '' put: operationId: webhooks_update - summary: Method updates a webhook by id + summary: Replace a webhook parameters: - in: path name: id @@ -5902,7 +5901,7 @@ paths: description: '' patch: operationId: webhooks_partial_update - summary: Methods does a partial update of chosen fields in a webhook + summary: Update a webhook parameters: - in: path name: id @@ -5932,7 +5931,7 @@ paths: description: '' delete: operationId: webhooks_destroy - summary: Method deletes a webhook + summary: Delete a webhook parameters: - in: path name: id @@ -5954,7 +5953,7 @@ paths: /api/webhooks/{id}/deliveries: get: operationId: webhooks_list_deliveries - summary: Method return a list of deliveries for a specific webhook + summary: List deliveries for a webhook parameters: - in: path name: id @@ -5992,7 +5991,7 @@ paths: /api/webhooks/{id}/deliveries/{delivery_id}: get: operationId: webhooks_retrieve_deliveries - summary: Method return a specific delivery for a specific webhook + summary: Get details of a webhook delivery parameters: - in: path name: delivery_id @@ -6024,7 +6023,7 @@ paths: /api/webhooks/{id}/deliveries/{delivery_id}/redelivery: post: operationId: webhooks_create_deliveries_redelivery - summary: Method redeliver a specific webhook delivery + summary: Redeliver a webhook delivery parameters: - in: path name: delivery_id @@ -6052,7 +6051,7 @@ paths: /api/webhooks/{id}/ping: post: operationId: webhooks_create_ping - summary: Method send ping webhook + summary: Send a ping webhook parameters: - in: path name: id @@ -6078,7 +6077,7 @@ paths: /api/webhooks/events: get: operationId: webhooks_retrieve_events - summary: Method return a list of available webhook 
events + summary: List available webhook events parameters: - in: query name: type diff --git a/cvat/utils/background_jobs.py b/cvat/utils/background_jobs.py new file mode 100644 index 00000000000..caf2e859a53 --- /dev/null +++ b/cvat/utils/background_jobs.py @@ -0,0 +1,48 @@ +# Copyright (C) 2024 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from collections.abc import Callable +from datetime import datetime + +import django_rq + +def schedule_job_with_throttling( + queue_name: str, + job_id_base: str, + scheduled_time: datetime, + func: Callable, + **func_kwargs +) -> None: + """ + This function schedules an RQ job to run at `scheduled_time`, + unless it has already been used to schedule a job to run at some future time + with the same values of `queue_name` and `job_id_base`, + in which case it does nothing. + + The scheduled job will have an ID beginning with `job_id_base`, + and will execute `func(**func_kwargs)`. + """ + with django_rq.get_connection(queue_name) as connection: + # The blocker key is used to implement the throttling. + # The first time this function is called for a given tuple of + # (queue_name, job_id_base), we schedule the job and create a blocker + # that expires at the same time as the job is supposed to start. + # Until the blocker expires, we don't schedule any more jobs + # with the same tuple. + blocker_key = f"cvat:utils:scheduling-blocker:{queue_name}:{job_id_base}" + if connection.exists(blocker_key): + return + + queue_job_id = f"{job_id_base}-{scheduled_time.timestamp()}" + + # TODO: reuse the Redis connection if Django-RQ allows it. + # See . + django_rq.get_scheduler(queue_name).enqueue_at( + scheduled_time, + func, + **func_kwargs, + job_id=queue_job_id, + ) + + connection.set(blocker_key, queue_job_id, exat=scheduled_time) diff --git a/site/build_docs.py b/site/build_docs.py index 6a810ef605c..d4720ad8b0e 100755 --- a/site/build_docs.py +++ b/site/build_docs.py @@ -146,7 +146,8 @@ def run_hugo( run_hugo( output_dir / tag.name, - # Docsy doesn't forward the current version url to templates + # This variable is no longer needed by the current version, + # but it was required in v2.11.2 and older.
extra_env_vars={VERSION_URL_ENV_VAR: f"/cvat/{tag.name}/docs"}, executable=hugo, ) diff --git a/site/layouts/home.html b/site/layouts/home.html index a1590cdde65..c4e8592c86f 100644 --- a/site/layouts/home.html +++ b/site/layouts/home.html @@ -1,10 +1,6 @@ - {{ $version_rel_url := os.Getenv "HUGO_VERSION_REL_URL" }} - {{ if not $version_rel_url }} - {{ $version_rel_url = "/cvat/docs" }} - {{ end }} - + \ No newline at end of file diff --git a/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js b/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js new file mode 100644 index 00000000000..c31c06410c5 --- /dev/null +++ b/tests/cypress/e2e/issues_prs2/issue_7428_importing_annotation_from_cloud_after_local_import.js @@ -0,0 +1,134 @@ +// Copyright (C) 2024 CVAT.ai Corporation +// +// SPDX-License-Identifier: MIT + +/// + +import { taskName, labelName } from '../../support/const'; + +context('Incorrect cloud storage filename used in subsequent import.', () => { + const annotationsArchiveNameCloud = 'bazquux'; + const annotationsArchiveNameLocal = 'foobar'; + let createdCloudStorageId; + const issueId = '7428'; + const exportFormat = 'CVAT for images'; + const rectangleShape2Points = { + points: 'By 2 Points', + type: 'Shape', + labelName, + firstX: 400, + firstY: 100, + secondX: 500, + secondY: 200, + }; + + const serverHost = Cypress.config('baseUrl').includes('3000') ? 'localhost' : 'minio'; + + const cloudStorageData = { + displayName: 'Demo bucket', + resource: 'public', + manifest: 'manifest.jsonl', + endpointUrl: `http://${serverHost}:9000`, + }; + + function uploadToTask({ + useDefaultLocation = true, + annotationsArchiveName, + CloudStorageId, + }) { + cy.clickInTaskMenu('Upload annotations', true); + cy.get('.cvat-modal-import-dataset').find('.cvat-modal-import-select').click(); + cy.contains('.cvat-modal-import-dataset-option-item', 'CVAT 1.1').click(); + cy.get('.cvat-modal-import-select').should('contain.text', 'CVAT 1.1'); + if (!useDefaultLocation) { + cy.get('.cvat-modal-import-dataset') + .find('.cvat-modal-import-switch-use-default-storage') + .click(); + cy.get('.cvat-select-source-storage').within(() => { + cy.get('.ant-select-selection-item').click(); + }); + cy.contains('.cvat-select-source-storage-location', 'Cloud storage') + .should('be.visible') + .click(); + if (CloudStorageId) { + cy.get('.cvat-search-source-storage-cloud-storage-field').click(); + cy.get('.cvat-cloud-storage-select-provider').click(); + } + cy.get('.cvat-modal-import-dataset') + .find('.cvat-modal-import-filename-input') + .type(annotationsArchiveName); + } else { + cy.get('input[type="file"]').attachFile(annotationsArchiveName, { subjectType: 'drag-n-drop' }); + cy.get(`[title="${annotationsArchiveName}"]`).should('be.visible'); + } + cy.contains('button', 'OK').click(); + cy.get('.cvat-modal-content-load-task-annotation') + .should('be.visible') + .within(() => { + cy.contains('button', 'Update').click(); + }); + cy.get('.cvat-notification-notice-import-annotation-start').should('be.visible'); + cy.closeNotification('.cvat-notification-notice-import-annotation-start'); + cy.verifyNotification(); + cy.get('.cvat-notification-notice-upload-annotations-fail').should('not.exist'); + } + + before(() => { + createdCloudStorageId = cy.attachS3Bucket(cloudStorageData); + cy.goToTaskList(); + cy.openTaskJob(taskName); + cy.createRectangle(rectangleShape2Points); + cy.saveJob('PATCH', 200, 'saveJobExportDataset'); + 
cy.goToTaskList(); + cy.openTask(taskName); + }); + + after(() => { + cy.contains('.cvat-header-button', 'Cloud Storages').should('be.visible').click(); + cy.url().should('include', '/cloudstorages'); + cy.deleteCloudStorage(cloudStorageData.displayName); + }); + + describe(`Testing issue "${issueId}"`, () => { + it('Export Annotation to the local storage', () => { + const exportParams = { + type: 'annotations', + format: exportFormat, + archiveCustomName: annotationsArchiveNameLocal, + }; + cy.exportTask(exportParams); + cy.waitForDownload(); + }); + + it('Export Annotation to the cloud storage', () => { + const exportParams = { + type: 'annotations', + format: exportFormat, + archiveCustomName: annotationsArchiveNameCloud, + targetStorage: { + location: 'Cloud storage', + cloudStorageId: createdCloudStorageId, + }, + useDefaultLocation: false, + }; + cy.exportTask(exportParams); + cy.waitForFileUploadToCloudStorage(); + }); + + it('Import Annotation from the local storage', () => { + const importParams = { + useDefaultLocation: true, + annotationsArchiveName: `${annotationsArchiveNameLocal}.zip`, + }; + uploadToTask(importParams); + }); + it('Import Annotation from the cloud storage', () => { + const importParams = { + useDefaultLocation: false, + annotationsArchiveName: `${annotationsArchiveNameCloud}.zip`, + CloudStorageId: createdCloudStorageId, + }; + uploadToTask(importParams); + }); + }); +}); diff --git a/tests/cypress/support/commands.js b/tests/cypress/support/commands.js index b5a2b01d933..a4e0c492ab7 100644 --- a/tests/cypress/support/commands.js +++ b/tests/cypress/support/commands.js @@ -1168,8 +1168,9 @@ Cypress.Commands.add('closeModalUnsupportedPlatform', () => { Cypress.Commands.add('exportTask', ({ type, format, archiveCustomName, + targetStorage = null, useDefaultLocation = true, }) => { - cy.interactMenu('Export task dataset'); + cy.clickInTaskMenu('Export task dataset', true); cy.get('.cvat-modal-export-task').should('be.visible').find('.cvat-modal-export-select').click(); cy.contains('.cvat-modal-export-option-item', format).should('be.visible').click(); cy.get('.cvat-modal-export-task').find('.cvat-modal-export-select').should('contain.text', format); @@ -1179,6 +1180,18 @@ Cypress.Commands.add('exportTask', ({ if (archiveCustomName) { cy.get('.cvat-modal-export-task').find('.cvat-modal-export-filename-input').type(archiveCustomName); } + if (!useDefaultLocation) { + cy.get('.cvat-modal-export-task').find('.cvat-settings-switch').click(); + cy.get('.cvat-select-target-storage').within(() => { + cy.get('.ant-select-selection-item').click(); + }); + cy.contains('.cvat-select-target-storage-location', targetStorage.location).should('be.visible').click(); + + if (targetStorage.cloudStorageId) { + cy.get('.cvat-search-target-storage-cloud-storage-field').click(); + cy.get('.cvat-cloud-storage-select-provider').click(); + } + } cy.contains('button', 'OK').click(); cy.get('.cvat-notification-notice-export-task-start').should('be.visible'); cy.closeNotification('.cvat-notification-notice-export-task-start'); diff --git a/tests/python/rest_api/test_tasks.py b/tests/python/rest_api/test_tasks.py index e56d85f3109..3efec38173b 100644 --- a/tests/python/rest_api/test_tasks.py +++ b/tests/python/rest_api/test_tasks.py @@ -557,9 +557,12 @@ def test_can_split_skeleton_tracks_on_jobs(self, jobs): "label_id": 59, "frame": 0, "shapes": [ + # https://github.com/opencv/cvat/issues/7498 + # https://github.com/opencv/cvat/pull/7615 + # This shape covers frame 0 to 7, + # We need to 
check that frame 5 is interpolated correctly for job #1 {"type": "points", "frame": 0, "points": [1.0, 2.0]}, - {"type": "points", "frame": 2, "points": [1.0, 2.0]}, - {"type": "points", "frame": 7, "points": [1.0, 2.0]}, + {"type": "points", "frame": 7, "points": [2.0, 4.0]}, ], }, ], @@ -588,8 +591,22 @@ def test_can_split_skeleton_tracks_on_jobs(self, jobs): track = job_annotations["tracks"][0] assert track.get("elements", []), "Expected to see track with elements" + def interpolate(frame): + # simple linear interpolation from ([1, 2], frame 0) to ([2, 4], frame 7) + return [(2.0 - 1.0) / 7 * (frame - 0) + 1.0, (4.0 - 2.0) / 7 * (frame - 0) + 2.0] + for element in track["elements"]: element_frames = set(shape["frame"] for shape in element["shapes"]) + assert all( + [ + not DeepDiff( + interpolate(shape["frame"]), shape["points"], significant_digits=2 + ) + for shape in element["shapes"] + if shape["frame"] >= 0 and shape["frame"] <= 7 + ] + ) + assert len(element["shapes"]) == 2 assert element_frames <= job_frame_range, "Track shapes get out of job frame range"
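
Reviewer note: below is a minimal usage sketch of the new `schedule_job_with_throttling` helper added in `cvat/utils/background_jobs.py` earlier in this diff. It is illustrative only and not part of the patch: the queue name, the job ID base, and the `update_quality_report` callback are hypothetical placeholders, and the snippet assumes a configured Django settings module with an RQ queue of that name.

```python
# Hypothetical usage sketch of schedule_job_with_throttling
# (see cvat/utils/background_jobs.py in this diff). The queue name and
# the callback are made-up placeholders, not part of the patch.
from datetime import datetime, timedelta, timezone

from cvat.utils.background_jobs import schedule_job_with_throttling


def update_quality_report(task_id: int) -> None:
    # Hypothetical callback, executed later by an RQ worker.
    print(f"Updating quality report for task {task_id}")


scheduled_time = datetime.now(timezone.utc) + timedelta(minutes=5)

# Only the first call actually schedules a job: it also sets a Redis
# blocker key that expires at `scheduled_time`. The remaining calls see
# the blocker and return immediately, so repeated triggers within the
# window collapse into a single scheduled job whose ID is
# "update-quality-report-42-<timestamp>".
for _ in range(3):
    schedule_job_with_throttling(
        "quality_reports",           # hypothetical queue name
        "update-quality-report-42",  # job_id_base
        scheduled_time,
        update_quality_report,
        task_id=42,
    )
```

A nice property of the blocker-key design is that the throttling is self-cleaning: no explicit unlock step is needed, because Redis expires the key (`exat=scheduled_time`) exactly when the scheduled job becomes eligible to run.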