From a8ac6e0f59d301e38721e682633e8f5cb038127a Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Thu, 27 Jul 2023 18:08:39 +0300 Subject: [PATCH 01/32] Get rid of resize observer (#6572) ### Motivation and context Causes issues on Chrome in Cypress context starting from v115 ### How has this been tested? ### Checklist - [x] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [x] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- .../labels-editor/skeleton-configurator.tsx | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/cvat-ui/src/components/labels-editor/skeleton-configurator.tsx b/cvat-ui/src/components/labels-editor/skeleton-configurator.tsx index 18b2c34a696..197ef7b271e 100644 --- a/cvat-ui/src/components/labels-editor/skeleton-configurator.tsx +++ b/cvat-ui/src/components/labels-editor/skeleton-configurator.tsx @@ -55,7 +55,7 @@ export default class SkeletonConfigurator extends React.PureComponent; private svgRef: React.RefObject; - private canvasResizeObserver: ResizeObserver; + private resizeListener: EventListener; private nodeCounter: number; private elementCounter: number; private draggableElement: SVGElement | null; @@ -77,30 +77,30 @@ export default class SkeletonConfigurator extends React.PureComponent { - const [canvasEntry] = entries; - (canvasEntry.target as HTMLCanvasElement).style.height = `${canvasEntry.target.clientWidth}px`; - (canvasEntry.target as HTMLCanvasElement).height = canvasEntry.target.clientWidth; - (canvasEntry.target as HTMLCanvasElement).width = canvasEntry.target.clientWidth; - if (this.svgRef.current) { - (this.svgRef.current as SVGSVGElement).style.width = `${canvasEntry.target.clientWidth}px`; - (this.svgRef.current as SVGSVGElement).style.height = `${canvasEntry.target.clientWidth}px`; + this.resizeListener = () => { + const canvas = this.canvasRef.current; + const svg = this.svgRef.current; + if (canvas && svg) { + const { clientWidth } = canvas; + canvas.style.height = `${clientWidth}px`; + canvas.height = clientWidth; + canvas.width = clientWidth; + svg.style.width = `${clientWidth}px`; + svg.style.height = `${clientWidth}px`; } this.setCanvasBackground(); - }); + }; } public componentDidMount(): void { - const { canvasRef, svgRef } = this; + const { svgRef } = this; const { label } = this.props; - const canvas = canvasRef.current; const svg = svgRef.current; - if (canvas) { - 
this.canvasResizeObserver.observe(canvas); - } - + window.addEventListener('resize', this.resizeListener); window.document.addEventListener('mouseup', this.onDocumentMouseUp); + window.dispatchEvent(new Event('resize')); + if (svg) { svg.setAttribute('viewBox', '0 0 100 100'); svg.addEventListener('mousedown', this.onSVGClick); @@ -164,7 +164,7 @@ export default class SkeletonConfigurator extends React.PureComponent { From f4a516aca4c1ea0445a17fba36fd80f1feda74eb Mon Sep 17 00:00:00 2001 From: Andrey Zhavoronkov Date: Thu, 27 Jul 2023 18:09:08 +0300 Subject: [PATCH 02/32] Update develop (#6571) ### Motivation and context ### How has this been tested? ### Checklist - [x] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--------- Co-authored-by: Boris Sekachev Co-authored-by: Nikita Manovich --- CHANGELOG.md | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 51408b06edf..cfa756c9e72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,32 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## \[Unreleased\] +## \[Unreleased] +### Added + +- TDB + +### Changed + +- TDB + +### Deprecated + +- TDB + +### Removed + +- TDB + +### Fixed + +- TDB + +### Security + +- TDB + +## \[2.5.2\] - 2023-07-27 ### Added @@ -20,10 +45,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - \[Helm\] In Helm, we've added a configurable default storage option to the chart () -### Deprecated - -- TDB - ### Removed - \[Helm\] In Helm, we've eliminated the obligatory use of hardcoded traefik ingress () @@ -48,10 +69,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - \[Helm\] In Helm, we've resolved an issue with multiple caches in the same RWX volume, which was preventing db migration from starting () -### Security - -- TDB - ## \[2.5.1\] - 2023-07-19 ### Fixed From 4b86439d1353d50e40e4b64e807c461bef99f1fb Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Fri, 28 Jul 2023 12:09:09 +0300 Subject: [PATCH 03/32] Revamp the progress reporting API (#6556) There are several problems with how progress reporting is handled in the SDK, both on the interface and implementation level: * The user is supposed to know, for a given function, which units it will report progress in. This is unnecessary coupling and prevents us from switching to different units, or to have several progress bars using different units. 
* To create a TqdmProgressReporter, you have to create a tqdm instance, which immediately draws a progress bar. This works poorly if the function prints any log messages before the progress actually starts. * There's no easy way to automatically call `finish` on a progress bar, so some functions don't (for example, `Downloader.download_file`). This can cause unexpected output, since tqdm will refresh the progress bar in a background thread (possibly after we've already printed something else). * `iter` is basically broken, because it divides by `period`, which is 0 in all current implementations. * Even ignoring that, it's hard to use correctly, because you need to manually call `finish` in case of an exception. * `split` is not implemented and not used. * `StreamWithProgress.seek` assumes that the progress bar is at 0 at the start, and so does `ProgressReporter.iter`. The former also works incorrectly if the second argument is not `SEEK_SET`. Fix these problems by doing the following: * Add a new `start2` method which accepts more parameters. The default implementation calls `start`, so that if a user has implemented the original interface, it'll keep working. * Add a `DeferredTqdmProgressReporter` that accepts tqdm parameters instead of a tqdm instance, and only creates an instance after `start2` is called. Use it where `TqdmProgressReporter` was used before. The old `TqdmProgressReporter` is kept for compatibility, but it doesn't support any `start2` arguments other than those supported by the original `start`. * Add a `task` context manager, which automatically calls `start2` and `finish`. Use it everywhere instead of explicit `start`/`finish` calls. Remove `start`/`finish` calls from `StreamWithProgress` and `iter`. * Implement basic assertions to ensure that `start2` and `finish` are used correctly. * Remove `period` and `split`. * Rewrite `StreamWithProgress.seek` and `ProgressReporter.iter` to use relative progress reports. 
These changes should be backwards compatible for users who pass predefined or custom progress reporters into SDK functions. They are not backwards compatible for users who try to use progress reporters directly (e.g. calling `start`/`finish`). I don't consider that a significant issue, since the purpose of the `ProgressReporter` interface is for the user to get progress information from the SDK, not for them to use it in their own code. Originally developed for #6483. --- CHANGELOG.md | 7 +- cvat-cli/src/cvat_cli/cli.py | 22 ++-- cvat-sdk/cvat_sdk/core/downloading.py | 16 +-- cvat-sdk/cvat_sdk/core/helpers.py | 98 ++++++++++++------ cvat-sdk/cvat_sdk/core/progress.py | 141 ++++++++++++++------------ cvat-sdk/cvat_sdk/core/uploading.py | 92 +++++++++-------- tests/python/pytest.ini | 3 + tests/python/sdk/test_progress.py | 82 +++++++++++++++ tests/python/sdk/util.py | 5 +- 9 files changed, 302 insertions(+), 164 deletions(-) create mode 100644 tests/python/sdk/test_progress.py diff --git a/CHANGELOG.md b/CHANGELOG.md index cfa756c9e72..0d534d0269e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,11 +8,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## \[Unreleased] ### Added -- TDB +- \[SDK\] A `DeferredTqdmProgressReporter` class, which doesn't have glitchy output + like `TqdmProgressReporter` in certain circumstances + () ### Changed -- TDB +- \[SDK\] Custom `ProgressReporter` implementations should now override `start2` instead of `start` + () ### Deprecated diff --git a/cvat-cli/src/cvat_cli/cli.py b/cvat-cli/src/cvat_cli/cli.py index 9e5389acf86..1c480929801 100644 --- a/cvat-cli/src/cvat_cli/cli.py +++ b/cvat-cli/src/cvat_cli/cli.py @@ -7,9 +7,8 @@ import json from typing import Dict, List, Sequence, Tuple -import tqdm from cvat_sdk import Client, models -from cvat_sdk.core.helpers import TqdmProgressReporter +from cvat_sdk.core.helpers import DeferredTqdmProgressReporter from cvat_sdk.core.proxies.tasks import 
ResourceType @@ -67,7 +66,7 @@ def tasks_create( status_check_period=status_check_period, dataset_repository_url=dataset_repository_url, use_lfs=lfs, - pbar=self._make_pbar(), + pbar=DeferredTqdmProgressReporter(), ) print("Created task id", task.id) @@ -109,7 +108,7 @@ def tasks_dump( self.client.tasks.retrieve(obj_id=task_id).export_dataset( format_name=fileformat, filename=filename, - pbar=self._make_pbar(), + pbar=DeferredTqdmProgressReporter(), status_check_period=status_check_period, include_images=include_images, ) @@ -123,22 +122,21 @@ def tasks_upload( format_name=fileformat, filename=filename, status_check_period=status_check_period, - pbar=self._make_pbar(), + pbar=DeferredTqdmProgressReporter(), ) def tasks_export(self, task_id: str, filename: str, *, status_check_period: int = 2) -> None: """Download a task backup""" self.client.tasks.retrieve(obj_id=task_id).download_backup( - filename=filename, status_check_period=status_check_period, pbar=self._make_pbar() + filename=filename, + status_check_period=status_check_period, + pbar=DeferredTqdmProgressReporter(), ) def tasks_import(self, filename: str, *, status_check_period: int = 2) -> None: """Import a task from a backup file""" self.client.tasks.create_from_backup( - filename=filename, status_check_period=status_check_period, pbar=self._make_pbar() - ) - - def _make_pbar(self, title: str = None) -> TqdmProgressReporter: - return TqdmProgressReporter( - tqdm.tqdm(unit_scale=True, unit="B", unit_divisor=1024, desc=title) + filename=filename, + status_check_period=status_check_period, + pbar=DeferredTqdmProgressReporter(), ) diff --git a/cvat-sdk/cvat_sdk/core/downloading.py b/cvat-sdk/cvat_sdk/core/downloading.py index 3dc338f1468..fdde8430438 100644 --- a/cvat-sdk/cvat_sdk/core/downloading.py +++ b/cvat-sdk/cvat_sdk/core/downloading.py @@ -10,7 +10,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional from cvat_sdk.api_client.api_client import Endpoint -from cvat_sdk.core.progress import 
ProgressReporter +from cvat_sdk.core.progress import NullProgressReporter, ProgressReporter from cvat_sdk.core.utils import atomic_writer if TYPE_CHECKING: @@ -41,6 +41,9 @@ def download_file( assert not output_path.exists() + if pbar is None: + pbar = NullProgressReporter() + response = self._client.api_client.rest_client.GET( url, _request_timeout=timeout, @@ -53,18 +56,15 @@ def download_file( except ValueError: file_size = None - with atomic_writer(output_path, "wb") as fd: - if pbar is not None: - pbar.start(file_size, desc="Downloading") - + with atomic_writer(output_path, "wb") as fd, pbar.task( + total=file_size, desc="Downloading", unit_scale=True, unit="B", unit_divisor=1024 + ): while True: chunk = response.read(amt=CHUNK_SIZE, decode_content=False) if not chunk: break - if pbar is not None: - pbar.advance(len(chunk)) - + pbar.advance(len(chunk)) fd.write(chunk) def prepare_and_download_file_from_endpoint( diff --git a/cvat-sdk/cvat_sdk/core/helpers.py b/cvat-sdk/cvat_sdk/core/helpers.py index 36b739bebab..b04e33e4c68 100644 --- a/cvat-sdk/cvat_sdk/core/helpers.py +++ b/cvat-sdk/cvat_sdk/core/helpers.py @@ -6,6 +6,7 @@ import io import json +import warnings from typing import Any, Dict, Iterable, List, Optional, Union import tqdm @@ -13,7 +14,7 @@ from cvat_sdk import exceptions from cvat_sdk.api_client.api_client import Endpoint -from cvat_sdk.core.progress import ProgressReporter +from cvat_sdk.core.progress import BaseProgressReporter, ProgressReporter def get_paginated_collection( @@ -46,39 +47,83 @@ def get_paginated_collection( return results -class TqdmProgressReporter(ProgressReporter): +class _BaseTqdmProgressReporter(BaseProgressReporter): + tqdm: Optional[tqdm.tqdm] + + def report_status(self, progress: int): + super().report_status(progress) + self.tqdm.update(progress - self.tqdm.n) + + def advance(self, delta: int): + super().advance(delta) + self.tqdm.update(delta) + + +class TqdmProgressReporter(_BaseTqdmProgressReporter): def 
__init__(self, instance: tqdm.tqdm) -> None: super().__init__() + warnings.warn(f"use {DeferredTqdmProgressReporter.__name__} instead", DeprecationWarning) + self.tqdm = instance - @property - def period(self) -> float: - return 0 + def start2(self, total: int, *, desc: Optional[str] = None, **kwargs) -> None: + super().start2(total=total, desc=desc, **kwargs) - def start(self, total: int, *, desc: Optional[str] = None): self.tqdm.reset(total) self.tqdm.set_description_str(desc) - def report_status(self, progress: int): - self.tqdm.update(progress - self.tqdm.n) + def finish(self): + self.tqdm.refresh() + super().finish() - def advance(self, delta: int): - self.tqdm.update(delta) + +class DeferredTqdmProgressReporter(_BaseTqdmProgressReporter): + def __init__(self, tqdm_args: Optional[dict] = None) -> None: + super().__init__() + self.tqdm_args = tqdm_args or {} + self.tqdm = None + + def start2( + self, + total: int, + *, + desc: Optional[str] = None, + unit: str = "it", + unit_scale: bool = False, + unit_divisor: int = 1000, + **kwargs, + ) -> None: + super().start2( + total=total, + desc=desc, + unit=unit, + unit_scale=unit_scale, + unit_divisor=unit_divisor, + **kwargs, + ) + assert not self.tqdm + + self.tqdm = tqdm.tqdm( + **self.tqdm_args, + total=total, + desc=desc, + unit=unit, + unit_scale=unit_scale, + unit_divisor=unit_divisor, + ) def finish(self): - self.tqdm.refresh() + self.tqdm.close() + self.tqdm = None + super().finish() class StreamWithProgress: - def __init__(self, stream: io.RawIOBase, pbar: ProgressReporter, length: Optional[int] = None): + def __init__(self, stream: io.RawIOBase, pbar: ProgressReporter): self.stream = stream self.pbar = pbar - if hasattr(stream, "__len__"): - length = len(stream) - - self.length = length - pbar.start(length) + assert self.stream.tell() == 0 def read(self, size=-1): chunk = self.stream.read(size) @@ -86,22 +131,15 @@ def read(self, size=-1): self.pbar.advance(len(chunk)) return chunk - def __len__(self): - 
return self.length + def seek(self, pos: int, whence: int = io.SEEK_SET) -> None: + old_pos = self.stream.tell() + new_pos = self.stream.seek(pos, whence) + self.pbar.advance(new_pos - old_pos) + return new_pos - def seek(self, pos, start=0): - self.stream.seek(pos, start) - self.pbar.report_status(pos) - - def tell(self): + def tell(self) -> int: return self.stream.tell() - def __enter__(self) -> StreamWithProgress: - return self - - def __exit__(self, exc_type, exc_value, exc_traceback) -> None: - self.pbar.finish() - def expect_status(codes: Union[int, Iterable[int]], response: urllib3.HTTPResponse) -> None: if not hasattr(codes, "__iter__"): diff --git a/cvat-sdk/cvat_sdk/core/progress.py b/cvat-sdk/cvat_sdk/core/progress.py index f620e13c50c..7fd2d13a2cd 100644 --- a/cvat-sdk/cvat_sdk/core/progress.py +++ b/cvat-sdk/cvat_sdk/core/progress.py @@ -5,36 +5,69 @@ from __future__ import annotations -import math -from typing import Iterable, Optional, Tuple, TypeVar +import contextlib +from typing import ContextManager, Iterable, Optional, TypeVar T = TypeVar("T") class ProgressReporter: """ - Only one set of methods must be called: - - start - report_status / advance - finish - - iter - - split + Use as follows: - This class is supposed to manage the state of children progress bars - and release of their resources, if necessary. + with r.task(...): + r.report_status(...) + r.advance(...) + + for x in r.iter(...): + ... + + Implementations must override start2, finish, report_status and advance. """ - @property - def period(self) -> float: + @contextlib.contextmanager + def task(self, **kwargs) -> ContextManager[None]: """ - Returns reporting period. + Returns a context manager that represents a long-running task + for which progress can be reported. + + Entering it creates a progress bar, and exiting it destroys it. - For example, 0.1 would mean every 10%. + kwargs will be passed to `start()`. 
""" - raise NotImplementedError + self.start2(**kwargs) + + try: + yield None + finally: + self.finish() - def start(self, total: int, *, desc: Optional[str] = None): - """Initializes the progress bar""" + def start(self, total: int, *, desc: Optional[str] = None) -> None: + """ + This is a compatibility method. Override start2 instead. + """ raise NotImplementedError + def start2( + self, + total: int, + *, + desc: Optional[str] = None, + unit: str = "it", + unit_scale: bool = False, + unit_divisor: int = 1000, + **kwargs, + ) -> None: + """ + Initializes the progress bar. + + total, desc, unit, unit_scale, unit_divisor have the same meaning as in tqdm. + + kwargs is included for future extension; implementations of this method + must ignore it. + """ + self.start(total=total, desc=desc) + def report_status(self, progress: int): """Updates the progress bar""" raise NotImplementedError @@ -50,74 +83,52 @@ def finish(self): def iter( self, iterable: Iterable[T], - *, - total: Optional[int] = None, - desc: Optional[str] = None, ) -> Iterable[T]: """ Traverses the iterable and reports progress simultaneously. - Starts and finishes the progress bar automatically. - Args: iterable: An iterable to be traversed - total: The expected number of iterations. If not provided, will - try to use iterable.__len__. - desc: The status message Returns: An iterable over elements of the input sequence """ - if total is None and hasattr(iterable, "__len__"): - total = len(iterable) - - self.start(total, desc=desc) - - if total: - display_step = math.ceil(total * self.period) - - for i, elem in enumerate(iterable): - if not total or i % display_step == 0: - self.report_status(i) - + for elem in iterable: yield elem - - self.finish() - - def split(self, count: int) -> Tuple[ProgressReporter, ...]: - """ - Splits the progress bar into few independent parts. - In case of 0 must return an empty tuple. 
- - This class is supposed to manage the state of children progress bars - and release of their resources, if necessary. - """ - raise NotImplementedError + self.advance(1) -class NullProgressReporter(ProgressReporter): - @property - def period(self) -> float: - return 0 +class BaseProgressReporter(ProgressReporter): + def __init__(self) -> None: + self._in_progress = False - def start(self, total: int, *, desc: Optional[str] = None): - pass + def start2( + self, + total: int, + *, + desc: Optional[str] = None, + unit: str = "it", + unit_scale: bool = False, + unit_divisor: int = 1000, + **kwargs, + ) -> None: + assert not self._in_progress + self._in_progress = True def report_status(self, progress: int): - pass + assert self._in_progress def advance(self, delta: int): - pass + assert self._in_progress + + def finish(self) -> None: + assert self._in_progress + self._in_progress = False + + def __del__(self): + assert not self._in_progress, "Unfinished task!" - def iter( - self, - iterable: Iterable[T], - *, - total: Optional[int] = None, - desc: Optional[str] = None, - ) -> Iterable[T]: - yield from iterable - def split(self, count: int) -> Tuple[ProgressReporter]: - return (self,) * count +class NullProgressReporter(BaseProgressReporter): + pass diff --git a/cvat-sdk/cvat_sdk/core/uploading.py b/cvat-sdk/cvat_sdk/core/uploading.py index 85815ea3628..f13f862e91c 100644 --- a/cvat-sdk/cvat_sdk/core/uploading.py +++ b/cvat-sdk/cvat_sdk/core/uploading.py @@ -7,7 +7,7 @@ import json import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, ContextManager, Dict, List, Optional, Sequence, Tuple import requests import urllib3 @@ -180,13 +180,25 @@ def upload_file( # query params are used only in the extra messages assert meta["filename"] + if pbar is None: + pbar = NullProgressReporter() + + file_size = filename.stat().st_size + self._tus_start_upload(url, 
query_params=query_params) - real_filename = self._upload_file_data_with_tus( - url=url, filename=filename, meta=meta, pbar=pbar, logger=logger - ) + with self._uploading_task(pbar, file_size): + real_filename = self._upload_file_data_with_tus( + url=url, filename=filename, meta=meta, pbar=pbar, logger=logger + ) query_params["filename"] = real_filename return self._tus_finish_upload(url, query_params=query_params, fields=fields) + @staticmethod + def _uploading_task(pbar: ProgressReporter, total_size: int) -> ContextManager[None]: + return pbar.task( + total=total_size, desc="Uploading data", unit_scale=True, unit="B", unit_divisor=1024 + ) + def _wait_for_completion( self, url: str, @@ -219,21 +231,13 @@ def _make_tus_uploader(api_client: ApiClient, url: str, **kwargs): return _MyTusUploader(client=client, api_client=api_client, **kwargs) - def _upload_file_data_with_tus( - self, url, filename, *, meta=None, pbar=None, logger=None - ) -> str: - file_size = filename.stat().st_size - if pbar is None: - pbar = NullProgressReporter() - - with open(filename, "rb") as input_file, StreamWithProgress( - input_file, pbar, length=file_size - ) as input_file_with_progress: + def _upload_file_data_with_tus(self, url, filename, *, meta=None, pbar, logger=None) -> str: + with open(filename, "rb") as input_file: tus_uploader = self._make_tus_uploader( self._client.api_client, url=url.rstrip("/") + "/", metadata=meta, - file_stream=input_file_with_progress, + file_stream=StreamWithProgress(input_file, pbar), chunk_size=Uploader._CHUNK_SIZE, log_func=logger, ) @@ -347,44 +351,44 @@ def upload_files( ): bulk_file_groups, separate_files, total_size = self._split_files_by_requests(resources) - if pbar is not None: - pbar.start(total_size, desc="Uploading data") + if pbar is None: + pbar = NullProgressReporter() if str(kwargs.get("sorting_method")).lower() == "predefined": # Request file ordering, because we reorder files to send more efficiently 
kwargs.setdefault("upload_file_order", [p.name for p in resources]) - self._tus_start_upload(url) - - for group, group_size in bulk_file_groups: - files = {} - for i, filename in enumerate(group): - files[f"client_files[{i}]"] = ( - os.fspath(filename), - filename.read_bytes(), + with self._uploading_task(pbar, total_size): + self._tus_start_upload(url) + + for group, group_size in bulk_file_groups: + files = {} + for i, filename in enumerate(group): + files[f"client_files[{i}]"] = ( + os.fspath(filename), + filename.read_bytes(), + ) + response = self._client.api_client.rest_client.POST( + url, + post_params={"image_quality": kwargs["image_quality"], **files}, + headers={ + "Content-Type": "multipart/form-data", + "Upload-Multiple": "", + **self._client.api_client.get_common_headers(), + }, ) - response = self._client.api_client.rest_client.POST( - url, - post_params={"image_quality": kwargs["image_quality"], **files}, - headers={ - "Content-Type": "multipart/form-data", - "Upload-Multiple": "", - **self._client.api_client.get_common_headers(), - }, - ) - expect_status(200, response) + expect_status(200, response) - if pbar is not None: pbar.advance(group_size) - for filename in separate_files: - self._upload_file_data_with_tus( - url, - filename, - meta={"filename": filename.name}, - pbar=pbar, - logger=self._client.logger.debug, - ) + for filename in separate_files: + self._upload_file_data_with_tus( + url, + filename, + meta={"filename": filename.name}, + pbar=pbar, + logger=self._client.logger.debug, + ) self._tus_finish_upload(url, fields=kwargs) diff --git a/tests/python/pytest.ini b/tests/python/pytest.ini index 05cda52273d..775758a2969 100644 --- a/tests/python/pytest.ini +++ b/tests/python/pytest.ini @@ -8,3 +8,6 @@ timeout = 15 markers = with_external_services: The test requires services extrernal to the default CVAT deployment, e.g. a Git server etc. 
+ +filterwarnings = + ignore::DeprecationWarning:cvat_sdk.core diff --git a/tests/python/sdk/test_progress.py b/tests/python/sdk/test_progress.py new file mode 100644 index 00000000000..a8f2fc10c6a --- /dev/null +++ b/tests/python/sdk/test_progress.py @@ -0,0 +1,82 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +import io +import warnings +from typing import Optional + +import tqdm +from cvat_sdk.core.helpers import DeferredTqdmProgressReporter, TqdmProgressReporter +from cvat_sdk.core.progress import NullProgressReporter, ProgressReporter + + +def _exercise_reporter(r: ProgressReporter) -> None: + with r.task(total=5, desc="Test task", unit="parrots"): + r.advance(1) + r.report_status(4) + + for x in r.iter(["x"]): + assert x == "x" + + +def test_null_reporter(): + _exercise_reporter(NullProgressReporter()) + # NPR doesn't do anything, so there's nothing to assert + + +def test_tqdm_reporter(): + f = io.StringIO() + + instance = tqdm.tqdm(file=f) + + with warnings.catch_warnings(): + r = TqdmProgressReporter(instance) + + _exercise_reporter(r) + + output = f.getvalue() + + assert "100%" in output + assert "Test task" in output + # TPR doesn't support parameters other than "total" and "desc", + # so there won't be any parrots in the output. 
+ + +def test_deferred_tqdm_reporter(): + f = io.StringIO() + + _exercise_reporter(DeferredTqdmProgressReporter({"file": f})) + + output = f.getvalue() + + assert "100%" in output + assert "Test task" in output + assert "parrots" in output + + +class _LegacyProgressReporter(ProgressReporter): + # overriding start instead of start2 + def start(self, total: int, *, desc: Optional[str] = None) -> None: + self.total = total + self.desc = desc + self.progress = 0 + + def report_status(self, progress: int): + self.progress = progress + + def advance(self, delta: int): + self.progress += delta + + def finish(self): + self.finished = True + + +def test_legacy_progress_reporter(): + r = _LegacyProgressReporter() + + _exercise_reporter(r) + + assert r.total == 5 + assert r.desc == "Test task" + assert r.progress == 5 diff --git a/tests/python/sdk/util.py b/tests/python/sdk/util.py index 5861c658111..1686330ad9f 100644 --- a/tests/python/sdk/util.py +++ b/tests/python/sdk/util.py @@ -9,12 +9,11 @@ import pytest from cvat_sdk.api_client.rest import RESTClientObject -from cvat_sdk.core.helpers import TqdmProgressReporter -from tqdm import tqdm +from cvat_sdk.core.helpers import DeferredTqdmProgressReporter def make_pbar(file, **kwargs): - return TqdmProgressReporter(tqdm(file=file, mininterval=0, **kwargs)) + return DeferredTqdmProgressReporter({"file": file, "mininterval": 0, **kwargs}) def generate_coco_json(filename: Path, img_info: Tuple[Path, int, int]): From 844a72b82c241c2f612debc113de54b2f3612e7e Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 28 Jul 2023 15:26:44 +0300 Subject: [PATCH 04/32] Optimized client side rendering, improved previews code (#6543) ### Motivation and context * drawImage is about x10+ times faster than putImageDate * unified interface on cvat-core * yes, there is still some time necessary to convert ImageData to ImageBitmap Before: image After: image ### How has this been tested? 
### Checklist - [x] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [x] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- cvat-canvas/package.json | 2 +- cvat-canvas/src/typescript/canvasModel.ts | 2 +- cvat-canvas/src/typescript/canvasView.ts | 14 +-- cvat-core/package.json | 2 +- cvat-core/src/cloud-storage.ts | 20 ++-- cvat-core/src/frames.ts | 29 ++---- cvat-core/src/ml-model.ts | 39 +++----- cvat-core/src/project-implementation.ts | 7 +- cvat-core/src/server-proxy.ts | 39 +++----- cvat-core/src/session-implementation.ts | 23 ++--- cvat-core/tests/mocks/server-proxy.mock.js | 2 +- cvat-data/src/ts/cvat-data.ts | 96 ++++++------------- cvat-ui/src/actions/cloud-storage-actions.ts | 2 +- cvat-ui/src/actions/models-actions.ts | 2 +- .../advanced-configuration-form.tsx | 4 +- cvat-ui/src/cvat-core-wrapper.ts | 2 + 16 files changed, 94 insertions(+), 191 deletions(-) diff --git a/cvat-canvas/package.json b/cvat-canvas/package.json index 64ac4c6e81f..3205b986395 100644 --- a/cvat-canvas/package.json +++ b/cvat-canvas/package.json @@ -1,6 +1,6 @@ { "name": "cvat-canvas", - "version": "2.17.1", + "version": "2.17.2", "description": "Part of Computer Vision Annotation Tool which presents its canvas library", "main": "src/canvas.ts", "scripts": { diff --git a/cvat-canvas/src/typescript/canvasModel.ts b/cvat-canvas/src/typescript/canvasModel.ts index dcc3deca173..aaa21282ea3 100644 --- a/cvat-canvas/src/typescript/canvasModel.ts +++ b/cvat-canvas/src/typescript/canvasModel.ts @@ -14,7 +14,7 @@ export interface Size { export interface Image { renderWidth: number; renderHeight: number; - imageData: ImageData | CanvasImageSource; + imageData: ImageBitmap; } export interface Position { diff --git a/cvat-canvas/src/typescript/canvasView.ts b/cvat-canvas/src/typescript/canvasView.ts index 1a6d442d614..593ea962bf0 100644 --- a/cvat-canvas/src/typescript/canvasView.ts +++ b/cvat-canvas/src/typescript/canvasView.ts @@ -1420,19 +1420,7 @@ export class CanvasViewImpl implements CanvasView, Listener { this.background.setAttribute('height', `${image.renderHeight}px`); if (ctx) { - if (image.imageData 
instanceof ImageData) { - ctx.scale( - image.renderWidth / image.imageData.width, - image.renderHeight / image.imageData.height, - ); - ctx.putImageData(image.imageData, 0, 0); - // Transformation matrix must not affect the putImageData() method. - // By this reason need to redraw the image to apply scale. - // https://www.w3.org/TR/2dcontext/#dom-context-2d-putimagedata - ctx.drawImage(this.background, 0, 0); - } else { - ctx.drawImage(image.imageData, 0, 0); - } + ctx.drawImage(image.imageData, 0, 0, image.renderWidth, image.renderHeight); } if (model.imageIsDeleted) { diff --git a/cvat-core/package.json b/cvat-core/package.json index d8970e10744..21eb64183a5 100644 --- a/cvat-core/package.json +++ b/cvat-core/package.json @@ -1,6 +1,6 @@ { "name": "cvat-core", - "version": "9.3.0", + "version": "10.0.0", "description": "Part of Computer Vision Tool which presents an interface for client-side integration", "main": "src/api.ts", "scripts": { diff --git a/cvat-core/src/cloud-storage.ts b/cvat-core/src/cloud-storage.ts index bfc39f999d0..50655edad7d 100644 --- a/cvat-core/src/cloud-storage.ts +++ b/cvat-core/src/cloud-storage.ts @@ -254,8 +254,8 @@ export default class CloudStorage { return result; } - public async getPreview(): Promise { - const result = await PluginRegistry.apiWrapper.call(this, CloudStorage.prototype.getPreview); + public async preview(): Promise { + const result = await PluginRegistry.apiWrapper.call(this, CloudStorage.prototype.preview); return result; } @@ -375,20 +375,14 @@ Object.defineProperties(CloudStorage.prototype.getContent, { }, }); -Object.defineProperties(CloudStorage.prototype.getPreview, { +Object.defineProperties(CloudStorage.prototype.preview, { implementation: { writable: false, enumerable: false, - value: async function implementation(): Promise { - return new Promise((resolve, reject) => { - serverProxy.cloudStorages - .getPreview(this.id) - .then((result) => ((result) ? 
decodePreview(result) : Promise.resolve(result))) - .then((decoded) => resolve(decoded)) - .catch((error) => { - reject(error); - }); - }); + value: async function implementation(this: CloudStorage): Promise { + const preview = await serverProxy.cloudStorages.getPreview(this.id); + if (!preview) return ''; + return decodePreview(preview); }, }, }); diff --git a/cvat-core/src/frames.ts b/cvat-core/src/frames.ts index 247bafafb44..b1ed902d3ea 100644 --- a/cvat-core/src/frames.ts +++ b/cvat-core/src/frames.ts @@ -228,7 +228,7 @@ FrameData.prototype.data.implementation = async function (onServerRequest) { const taskDataCache = frameDataCache[this.jid]; const activeChunk = taskDataCache.activeChunkRequest; activeChunk.request = serverProxy.frames - .getData(null, this.jid, activeChunk.chunkNumber) + .getData(this.jid, activeChunk.chunkNumber) .then((chunk) => { frameDataCache[this.jid].activeChunkRequest.completed = true; if (!taskDataCache.nextChunkRequest) { @@ -666,12 +666,7 @@ async function getImageContext(jobID, frame) { serverProxy.frames .getImageContext(jobID, frame) .then((result) => { - if (isNode) { - // eslint-disable-next-line no-undef - resolve(global.Buffer.from(result, 'binary').toString('base64')); - } else if (isBrowser) { - resolve(result); - } + resolve(result); }) .catch((error) => { reject(error); @@ -690,18 +685,14 @@ export async function getContextImage(jobID, frame) { export function decodePreview(preview: Blob): Promise { return new Promise((resolve, reject) => { - if (isNode) { - resolve(global.Buffer.from(preview, 'binary').toString('base64')); - } else if (isBrowser) { - const reader = new FileReader(); - reader.onload = () => { - resolve(reader.result as string); - }; - reader.onerror = (error) => { - reject(error); - }; - reader.readAsDataURL(preview); - } + const reader = new FileReader(); + reader.onload = () => { + resolve(reader.result as string); + }; + reader.onerror = (error) => { + reject(error); + }; + 
reader.readAsDataURL(preview); }); } diff --git a/cvat-core/src/ml-model.ts b/cvat-core/src/ml-model.ts index 2d70f6aace3..04952d153f0 100644 --- a/cvat-core/src/ml-model.ts +++ b/cvat-core/src/ml-model.ts @@ -3,9 +3,9 @@ // // SPDX-License-Identifier: MIT -import { isBrowser, isNode } from 'browser-or-node'; import serverProxy from './server-proxy'; import PluginRegistry from './plugins'; +import { decodePreview } from './frames'; import { ModelProviders, ModelKind, ModelReturnType } from './enums'; import { SerializedModel, ModelAttribute, ModelParams, ModelTip, @@ -117,8 +117,8 @@ export default class MLModel { return result; } - public async getPreview(): Promise { - const result = await PluginRegistry.apiWrapper.call(this, MLModel.prototype.getPreview); + public async preview(): Promise { + const result = await PluginRegistry.apiWrapper.call(this, MLModel.prototype.preview); return result; } } @@ -127,7 +127,7 @@ Object.defineProperties(MLModel.prototype.save, { implementation: { writable: false, enumerable: false, - value: async function implementation(): Promise { + value: async function implementation(this: MLModel): Promise { const modelData = { provider: this.provider, url: this.serialized.url, @@ -144,7 +144,7 @@ Object.defineProperties(MLModel.prototype.delete, { implementation: { writable: false, enumerable: false, - value: async function implementation(): Promise { + value: async function implementation(this: MLModel): Promise { if (this.isDeletable) { await serverProxy.functions.delete(this.id); } @@ -152,32 +152,15 @@ Object.defineProperties(MLModel.prototype.delete, { }, }); -Object.defineProperties(MLModel.prototype.getPreview, { +Object.defineProperties(MLModel.prototype.preview, { implementation: { writable: false, enumerable: false, - value: async function implementation(): Promise { - if (this.provider === ModelProviders.CVAT) { - return ''; - } - return new Promise((resolve, reject) => { - serverProxy.functions - .getPreview(this.id) - 
.then((result) => { - if (isNode) { - resolve(global.Buffer.from(result, 'binary').toString('base64')); - } else if (isBrowser) { - const reader = new FileReader(); - reader.onload = () => { - resolve(reader.result); - }; - reader.readAsDataURL(result); - } - }) - .catch((error) => { - reject(error); - }); - }); + value: async function implementation(this: MLModel): Promise { + if (this.provider === ModelProviders.CVAT) return ''; + const preview = await serverProxy.functions.getPreview(this.id); + if (!preview) return ''; + return decodePreview(preview); }, }, }); diff --git a/cvat-core/src/project-implementation.ts b/cvat-core/src/project-implementation.ts index 17e0c953d1e..8a274d4ee30 100644 --- a/cvat-core/src/project-implementation.ts +++ b/cvat-core/src/project-implementation.ts @@ -83,10 +83,11 @@ export default function implementProject(projectClass) { return result; }; - projectClass.prototype.preview.implementation = async function () { + projectClass.prototype.preview.implementation = async function (this: Project): Promise { + if (this.id === null) return ''; const preview = await serverProxy.projects.getPreview(this.id); - const decoded = await decodePreview(preview); - return decoded; + if (!preview) return ''; + return decodePreview(preview); }; projectClass.prototype.annotations.exportDataset.implementation = async function ( diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index 6b438d6ca16..aaf5f39d99c 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ -1402,7 +1402,7 @@ async function deleteJob(jobID: number): Promise { } } -async function getUsers(filter = { page_size: 'all' }) { +async function getUsers(filter = { page_size: 'all' }): Promise { const { backendAPI } = config; let response = null; @@ -1419,8 +1419,8 @@ async function getUsers(filter = { page_size: 'all' }) { return response.data.results; } -function getPreview(instance: 'projects' | 'tasks' | 'jobs' | 'cloudstorages') { - 
return async function (id: number) { +function getPreview(instance: 'projects' | 'tasks' | 'jobs' | 'cloudstorages' | 'functions') { + return async function (id: number | string): Promise { const { backendAPI } = config; let response = null; @@ -1434,11 +1434,15 @@ function getPreview(instance: 'projects' | 'tasks' | 'jobs' | 'cloudstorages') { throw new ServerError(`Could not get preview for "${instance}/${id}"`, code); } - return (response.status === 200) ? response.data : ''; + if (response.status === 404) { + return null; + } + + return response.data; }; } -async function getImageContext(jid, frame) { +async function getImageContext(jid: number, frame: number): Promise { const { backendAPI } = config; let response = null; @@ -1458,14 +1462,12 @@ async function getImageContext(jid, frame) { return response.data; } -async function getData(tid, jid, chunk) { +async function getData(jid: number, chunk: number): Promise { const { backendAPI } = config; - const url = jid === null ? `tasks/${tid}/data` : `jobs/${jid}/data`; - let response = null; try { - response = await workerAxios.get(`${backendAPI}/${url}`, { + response = await workerAxios.get(`${backendAPI}/jobs/${jid}/data`, { params: { ...enableOrganization(), quality: 'compressed', @@ -1560,23 +1562,6 @@ async function getFunctions(): Promise { } } -async function getFunctionPreview(modelID) { - const { backendAPI } = config; - - let response = null; - try { - const url = `${backendAPI}/functions/${modelID}/preview`; - response = await Axios.get(url, { - responseType: 'blob', - }); - } catch (errorData) { - const code = errorData.response ? 
errorData.response.status : errorData.code; - throw new ServerError(`Could not get preview for the model ${modelID} from the server`, code); - } - - return response.data; -} - async function getFunctionProviders() { const { backendAPI } = config; @@ -2435,7 +2420,7 @@ export default Object.freeze({ providers: getFunctionProviders, delete: deleteFunction, cancel: cancelFunctionRequest, - getPreview: getFunctionPreview, + getPreview: getPreview('functions'), }), issues: Object.freeze({ diff --git a/cvat-core/src/session-implementation.ts b/cvat-core/src/session-implementation.ts index 1cae70abd29..036f3c74be4 100644 --- a/cvat-core/src/session-implementation.ts +++ b/cvat-core/src/session-implementation.ts @@ -6,6 +6,7 @@ import { ArgumentError } from './exceptions'; import { HistoryActions, JobType } from './enums'; import { Storage } from './storage'; +import { Task as TaskClass, Job as JobClass } from './session'; import loggerStorage from './logger-storage'; import serverProxy from './server-proxy'; import { @@ -166,14 +167,11 @@ export function implementJob(Job) { return rangesData; }; - Job.prototype.frames.preview.implementation = async function () { - if (this.id === null || this.taskId === null) { - return ''; - } - + Job.prototype.frames.preview.implementation = async function (this: JobClass): Promise { + if (this.id === null || this.taskId === null) return ''; const preview = await serverProxy.jobs.getPreview(this.id); - const decoded = await decodePreview(preview); - return decoded; + if (!preview) return ''; + return decodePreview(preview); }; Job.prototype.frames.contextImage.implementation = async function (frameId) { @@ -583,14 +581,11 @@ export function implementTask(Task) { return rangesData; }; - Task.prototype.frames.preview.implementation = async function () { - if (this.id === null) { - return ''; - } - + Task.prototype.frames.preview.implementation = async function (this: TaskClass): Promise { + if (this.id === null) return ''; const preview = 
await serverProxy.tasks.getPreview(this.id); - const decoded = await decodePreview(preview); - return decoded; + if (!preview) return ''; + return decodePreview(preview); }; Task.prototype.frames.delete.implementation = async function (frame) { diff --git a/cvat-core/tests/mocks/server-proxy.mock.js b/cvat-core/tests/mocks/server-proxy.mock.js index d5587a42082..a42889eac71 100644 --- a/cvat-core/tests/mocks/server-proxy.mock.js +++ b/cvat-core/tests/mocks/server-proxy.mock.js @@ -373,7 +373,7 @@ class ServerProxy { } async function getPreview() { - return 'DUMMY_IMAGE'; + return null; } async function getData() { diff --git a/cvat-data/src/ts/cvat-data.ts b/cvat-data/src/ts/cvat-data.ts index b60d0a82384..78d0bcf558b 100644 --- a/cvat-data/src/ts/cvat-data.ts +++ b/cvat-data/src/ts/cvat-data.ts @@ -77,16 +77,15 @@ export class FrameProvider { private blockType: BlockType; /* - ImageBitmap when decode zip chunks - ImageData when decode video chunks + ImageBitmap when decode zip or video chunks Blob when 3D dimension null when not decoded yet */ - private frames: Record; + private frames: Record; private requestedBlockToDecode: null | BlockToDecode; private blocksAreBeingDecoded: Record; private promisedFrames: Record void; + resolve: (data: ImageBitmap | Blob) => void; reject: () => void; }>; private currentDecodingThreads: number; @@ -98,7 +97,7 @@ export class FrameProvider { private cachedEncodedBlocksLimit: number; private cachedDecodedBlocksLimit: number; - // used for video chunks to resize after decoding + // used for video chunks to get correct side after decoding private renderWidth: number; private renderHeight: number; @@ -246,44 +245,9 @@ export class FrameProvider { this._blocks[chunkNumber] = 'loading'; } - static cropImage( - imageBuffer: ArrayBuffer, - imageWidth: number, - imageHeight: number, - xOffset: number, - yOffset: number, - width: number, - height: number, - ): ImageData { - if (xOffset === 0 && width === imageWidth && yOffset === 0 && 
height === imageHeight) { - return new ImageData(new Uint8ClampedArray(imageBuffer), width, height); - } - const source = new Uint32Array(imageBuffer); - - const bufferSize = width * height * 4; - const buffer = new ArrayBuffer(bufferSize); - const rgbaInt32 = new Uint32Array(buffer); - const rgbaInt8Clamped = new Uint8ClampedArray(buffer); - - if (imageWidth === width) { - return new ImageData(new Uint8ClampedArray(imageBuffer, yOffset * 4, bufferSize), width, height); - } - - let writeIdx = 0; - for (let row = yOffset; row < height; row++) { - const start = row * imageWidth + xOffset; - rgbaInt32.set(source.subarray(start, start + width), writeIdx); - writeIdx += width; - } - - return new ImageData(rgbaInt8Clamped, width, height); - } - async startDecode(): Promise { const release = await this.mutex.acquire(); try { - const height = this.renderHeight; - const width = this.renderWidth; const { start, end, block } = this.requestedBlockToDecode; this.blocksRanges.push(`${start}:${end}`); @@ -307,34 +271,34 @@ export class FrameProvider { // ignore initialization message return; } + const keptIndex = index; + + // do not use e.data.height and e.data.width because they might be not correct + // instead, try to understand real height and width of decoded image via scale factor + const scaleFactor = Math.ceil(this.renderHeight / e.data.height); + const height = Math.round(this.renderHeight / scaleFactor); + const width = Math.round(this.renderWidth / scaleFactor); + + const array = new Uint8ClampedArray(e.data.buf.slice(0, width * height * 4)); + createImageBitmap(new ImageData(array, width)).then((bitmap) => { + this.frames[keptIndex] = bitmap; + const { resolveCallback } = this.blocksAreBeingDecoded[`${start}:${end}`]; + if (resolveCallback) { + resolveCallback(keptIndex); + } - const scaleFactor = Math.ceil(height / e.data.height); - this.frames[index] = FrameProvider.cropImage( - e.data.buf, - e.data.width, - e.data.height, - 0, - 0, - Math.floor(width / 
scaleFactor), - Math.floor(height / scaleFactor), - ); - - const { resolveCallback } = this.blocksAreBeingDecoded[`${start}:${end}`]; - if (resolveCallback) { - resolveCallback(index); - } - - if (index in this.promisedFrames) { - const { resolve } = this.promisedFrames[index]; - delete this.promisedFrames[index]; - resolve(this.frames[index]); - } + if (keptIndex in this.promisedFrames) { + const { resolve } = this.promisedFrames[keptIndex]; + delete this.promisedFrames[keptIndex]; + resolve(this.frames[keptIndex]); + } - if (index === end) { - worker.terminate(); - this.currentDecodingThreads--; - delete this.blocksAreBeingDecoded[`${start}:${end}`]; - } + if (keptIndex === end) { + worker.terminate(); + this.currentDecodingThreads--; + delete this.blocksAreBeingDecoded[`${start}:${end}`]; + } + }); index++; }; diff --git a/cvat-ui/src/actions/cloud-storage-actions.ts b/cvat-ui/src/actions/cloud-storage-actions.ts index f25c6d2a562..89bd3b72095 100644 --- a/cvat-ui/src/actions/cloud-storage-actions.ts +++ b/cvat-ui/src/actions/cloud-storage-actions.ts @@ -198,7 +198,7 @@ export function getCloudStoragePreviewAsync(cloudStorage: CloudStorage): ThunkAc return async (dispatch: ActionCreator): Promise => { dispatch(cloudStoragesActions.getCloudStoragePreview(cloudStorage.id)); try { - const result = await cloudStorage.getPreview(); + const result = await cloudStorage.preview(); dispatch(cloudStoragesActions.getCloudStoragePreviewSuccess(cloudStorage.id, result)); } catch (error) { dispatch(cloudStoragesActions.getCloudStoragePreviewFailed(cloudStorage.id, error)); diff --git a/cvat-ui/src/actions/models-actions.ts b/cvat-ui/src/actions/models-actions.ts index 4e42a8308e6..7cc374750e8 100644 --- a/cvat-ui/src/actions/models-actions.ts +++ b/cvat-ui/src/actions/models-actions.ts @@ -271,7 +271,7 @@ export function getModelProvidersAsync(): ThunkAction { export const getModelPreviewAsync = (model: MLModel): ThunkAction => async (dispatch) => { 
dispatch(modelsActions.getModelPreview(model.id)); try { - const result = await model.getPreview(); + const result = await model.preview(); dispatch(modelsActions.getModelPreviewSuccess(model.id, result)); } catch (error) { dispatch(modelsActions.getModelPreviewFailed(model.id, error)); diff --git a/cvat-ui/src/components/create-task-page/advanced-configuration-form.tsx b/cvat-ui/src/components/create-task-page/advanced-configuration-form.tsx index 4796af1c107..30ce8a552db 100644 --- a/cvat-ui/src/components/create-task-page/advanced-configuration-form.tsx +++ b/cvat-ui/src/components/create-task-page/advanced-configuration-form.tsx @@ -449,8 +449,8 @@ class AdvancedConfigurationForm extends React.PureComponent { > - Use zip/video chunks - + Prefer zip chunks + diff --git a/cvat-ui/src/cvat-core-wrapper.ts b/cvat-ui/src/cvat-core-wrapper.ts index 888c4413e54..ae5ff60e9e2 100644 --- a/cvat-ui/src/cvat-core-wrapper.ts +++ b/cvat-ui/src/cvat-core-wrapper.ts @@ -7,6 +7,7 @@ import _cvat from 'cvat-core/src/api'; import ObjectState from 'cvat-core/src/object-state'; import Webhook from 'cvat-core/src/webhook'; import MLModel from 'cvat-core/src/ml-model'; +import CloudStorage from 'cvat-core/src/cloud-storage'; import { ModelProvider } from 'cvat-core/src/lambda-manager'; import { Label, Attribute, @@ -60,6 +61,7 @@ export { Webhook, Issue, User, + CloudStorage, Organization, Comment, MLModel, From 1a0ad7dfd048727312db1435072427f361cf9f7b Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Mon, 31 Jul 2023 08:27:53 +0300 Subject: [PATCH 05/32] Increased default guide assets limitations (#6575) ### Motivation and context Current default assets limitations are too strict ### How has this been tested? 
### Checklist - [x] I submit my changes into the `develop` branch - [x] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- CHANGELOG.md | 3 +++ cvat/apps/engine/views.py | 14 +++++++++++--- cvat/settings/base.py | 5 +++-- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0d534d0269e..e33f54ff589 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,9 +14,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed +- Increased default guide assets limitations (30 assets, up to 10Mb each) + () - \[SDK\] Custom `ProgressReporter` implementations should now override `start2` instead of `start` () + ### Deprecated - TDB diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py index a89ebb773a1..220502aaa50 100644 --- a/cvat/apps/engine/views.py +++ b/cvat/apps/engine/views.py @@ -6,6 +6,7 @@ import io import os import os.path as osp +from PIL import Image from types import SimpleNamespace from typing import Optional import pytz @@ -2720,9 +2721,16 @@ def create(self, request, *args, **kwargs): self.perform_create(serializer) path = os.path.join(settings.ASSETS_ROOT, str(serializer.instance.uuid)) os.makedirs(path) - with open(os.path.join(path, file.name), 'wb+') as destination: - for chunk in file.chunks(): - destination.write(chunk) + if file.content_type in ('image/jpeg', 'image/png'): + image = Image.open(file) + if any(map(lambda x: x > settings.ASSET_MAX_IMAGE_SIZE, image.size)): + scale_factor = settings.ASSET_MAX_IMAGE_SIZE / max(image.size) + image = image.resize((map(lambda x: int(x * scale_factor), image.size))) + image.save(os.path.join(path, file.name)) + else: + with open(os.path.join(path, file.name), 'wb+') as destination: + for chunk in file.chunks(): + destination.write(chunk) headers = self.get_success_headers(serializer.data) return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) diff --git a/cvat/settings/base.py b/cvat/settings/base.py index 82b198d3642..69e2c7f33d0 100644 --- a/cvat/settings/base.py +++ b/cvat/settings/base.py @@ -695,8 +695,9 
@@ class CVAT_QUEUES(Enum): IMPORT_CACHE_SUCCESS_TTL = timedelta(hours=1) IMPORT_CACHE_CLEAN_DELAY = timedelta(hours=2) -ASSET_MAX_SIZE_MB = 2 +ASSET_MAX_SIZE_MB = 10 ASSET_SUPPORTED_TYPES = ('image/jpeg', 'image/png', 'image/webp', 'image/gif', 'application/pdf', ) -ASSET_MAX_COUNT_PER_GUIDE = 10 +ASSET_MAX_IMAGE_SIZE = 1920 +ASSET_MAX_COUNT_PER_GUIDE = 30 SMOKESCREEN_ENABLED = True From 5949ac3d8fb89157c1fb2c3543e509443dab3e3e Mon Sep 17 00:00:00 2001 From: Mariia Acoca <39969264+mdacoca@users.noreply.github.com> Date: Mon, 31 Jul 2023 09:20:12 +0200 Subject: [PATCH 06/32] added default attribute to the description (#6587) ### Motivation and context ### How has this been tested? ### Checklist - [ ] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [ ] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- .../manual/basics/create_an_annotation_task.md | 5 +++++ site/content/en/images/default_attribute.jpg | Bin 0 -> 26731 bytes 2 files changed, 5 insertions(+) create mode 100644 site/content/en/images/default_attribute.jpg diff --git a/site/content/en/docs/manual/basics/create_an_annotation_task.md b/site/content/en/docs/manual/basics/create_an_annotation_task.md index 0275b9367b3..a756753a2f0 100644 --- a/site/content/en/docs/manual/basics/create_an_annotation_task.md +++ b/site/content/en/docs/manual/basics/create_an_annotation_task.md @@ -153,6 +153,11 @@ To add an attribute, do the following: 4. In the **Attribute values** field, add attribute values.
To separate values use **Enter**.
To delete value, use **Backspace** or click **x** next to the value name. 5. (Optional) For mutable attributes, select **Mutable**. +6. (Optional) To set the default attribute, hover over it with mouse cursor and + click on it. The default attribute will change color to blue. + + ![Default attribute](/images/default_attribute.jpg) + To delete an attribute, click **Delete attribute**. diff --git a/site/content/en/images/default_attribute.jpg b/site/content/en/images/default_attribute.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ffa8871c07c8cc96860e1c72aa8cd29edaf4fb3c GIT binary patch literal 26731 zcmeFZcR*9^mM^?%Ql&{ph)VB@G^GSzrHM+F4pC5P0@6D{5D=sYCt>$jC@Z$*9Q5&rr}%(a=y+QB$9#Wu!k#%Roy_P0vEl zaE^(YnVE);m5qgojgg6&>1QWIBw!m-GD1u@h)GF)N)6Wb2g?Cc1~SHr zS8kj+r}L2fk~5R!^VoC>o3JFU|%gD-IyMFVQ zimIBr#%(?Q`v!(a#wJ#e9zU_Rv9)t`bNBG{^7aXS5fU2qGCU$K{>|Hj#CJ)_8JQom zK4s_R=9QF|l~+_&Ro65%x3spkcXW0Q4h@ftj(z)%o}HUtSX^3O`LT-G+1=YeIK&gDa2 zOLlbSa5|ADv_Cz*ACWPQ!N{I86M!2`L;~oK1Rzb50Qe)eGdFbM{d@$Vp&zU7rcH{l zi^qFQoF2fNV#+lLfI=ev1UjoTpv>r>r**V|I{Z!msFX$oa67P*bpjBgrUGajOcU(b8Y@phP$}SYpCL;%{IgQ^gv(;{VC@Y0Wc;9^*a&KF9UQ`nEuN=_gM%) zOgL%^a!ipwqJo;Ab2<#e2MGdsHAVT>=|3t>qBGskMuikONFB=3GRnoO8Yjg7L z$Ro#7%tX|Ndw?-O&eD;qd8SLh@yY7T&i*l=nKNGB!f^U&Q;@pMmiOuStrvk@F3L+K z3PKo8{UKh=TSR1$+o=lfnpl;CW^;{uQ^A}~Getlbw>RmkIw$kQ=eSG2WW*S@ahm|( zSv-R?znXB0q*xE{ouuUZt9Uj_*U38Hm3mrhB(tEZ{qaE@(U;C5OXIR)aq(n_tAZ!@KT0;9E3cE}Uoz(5ocDO}fjEl;3-g%9k7J#d4%Sm| zj~d`nNJW-q``PITJR8RFTSD8xEPq{1a+kdC8ucrUQLp$OuQZ`dwWub)Su^79j*g{l zP`s9D8f@O!?dm~k{b8|v&6HP~8W+O9KT05;dvI_1jZjqE(ib^4foJ`3Son-YL>Yo| zt%-B(?$wq=Zk|`>Lwxaubt56!$LD2AJ|@bFEOp(c4y5E_57@ea_JYic;?H73a_kuH z0^2uUPCIA|x51Ul zoE#%-P1h#}6@|MGlCYlsnQQ34Q3(gtFtIY$14Fdfej(1q?6ufi2fh&N*Ih;w2Ru#+ z;_A}R^8-fk+$EST&@s}P*rf5)1-C}|AnfCt(eD|J%OK)o&kJQARb32zx--To+tC;$ z5sn~n2c47Gbo=$iqNG_AgONSj;MGHx#6sAlG1q9KtZw&(svrShZy-%3!l+rY#+o{C zBqbep4*T}wDvZW`ZBkDYb0&W_RqMWO*{P;c^=s1~hp$THtQveh6MoF7Xmi*MWcBxS 
zN5-XQd5S@iV*#B4p5>k9@)D>;|GKo4wE-u`UA5C}B`*72(&AC4Cofljd?*~ZsqB1u zxN%ikY}Dh0YX6cD=^1%10U+2kd8`ddhIRMTb?0($hzWgMSvf*K-qU-&G^36|HJvh9 zgXDw$OZlspwPnv6zJWK(WAoZ)qC;lauRUV#bidcBVPLGka~z2}MfzQVw7}1J z88oY4q?d8L=gdvDoS~uRrnXNXMew>C1ZgVwR;g!&qj~-)nie`!Qx83OjrGm*%A=zj zxA$Z;H?yPr*y!h0+d5=juiRRgiMm2<{pP9rw~~a3Eaxf)v%cyXSa_Ns2Gxw@n6_nLaY>2DO=n4@sqq|J9GtN&gbdZujWmUG7=su)=`r!afPp@@=wq@T16c;5&#Y$IwF3(1m1ri1e|o( zfdgacavq2Q$#FKs*H!U!hv2dO2eZU4LwuBRIUlg^b>X!6+wy22e+2ybb|Cn8I+B zPIy^En-5eL|6Tfjmo+l0>g&rZIVV5H#-!0_GDuj1%fH%kb1p@H( zE?}M!AqapE@e9vHPfj$ojR~KJ^58xF)B^o&`a|-1`MzQJ@+jz=h)qE^Hvlp@?Vtl3 zYasx~(yJSQ?ru5cXuq5Qd`Tbx^KRbg7~C316+ZmWOi(Fr7~09h8o`A0lv+L!cP9S1d9g#vkIBT9JAu)j0D0hk&vLm=P#qVXlaZD{d>%6j{Ij1g-SL@gCh{ z#XRV*`$L5Q;QVb-``hq6eXyC(Z$?n~ML+Wb0>EJZ%*(p@IIY+%PQxJ^-R&vdMC1~i z{)CE>M2?*OQu4if9e2kKpubyv*$%q9W={Z~ixGgfwI%{ETcJQl0F;sR@EHXN&ZkU4 z{=@<+-!gS!NXpk-)+~BiyC!0gKbhhW^4O+aOFjX19=S91ig2P6MqN214a|UUhX*y-j_vV~M*Cn;oMiDlvwtGX7$A$<$uG8EG zE}REO?IjL6d-fr+oa=0Va*0~8qa~?O8VMpztVMkD8c(ET9MO-de|WesT9}Wo+is*A zC5=$4U9*(EtkZa7B6>d{*KQiKirB}H}WJfcL2U{7Ci1OFd)HqDUj~h=~GM10r|W_B%Tz6%bfY;_Z%#nru8d^9ye6H z(y57B3hG_dk;)0VS@OxHQS*~J8)v9b1fcczd&qS^`^T6@admw|b!cA~S|-`|RXC}0 zD@SB$DL1n+X`C0J_o=S3veY-%I$B=;rpUcbVqv1V)*7N{SB;gT_9r)fo9&g~HkkX9 zsX38aujQay$?W>qH8{^0>Pk}Zj6nt)MjRrW|xiTqEwCF9RO52 zPFOodffIofPdq*1JOOADT09~E+ph_LC8X3_u0M5gYZq6KlGzf;mLggU_`A#P?pOB> zXe)he1b<<6cLVR_tMtijRE$&=&uFw}sZ)Q4kk4tsHXiH=U|gW3!o zb$hy&Tuq{;Imb6sjmvC|Ql>JABvCEaOtm-^a_wvW#x5>~ZaXb&4i;{b*yNz4hfym3 zH2r-Iy_6~`dfh2Wu0r-ouu?}>qghY#}EJHihjDI zf7g>|GKPDOBmiquTBoEV=)dRL651*n%6q6FXtXXEw08Zq>ay~qae@ zW~)m1`paq!uNPWO->I%DKBw}8DiDY=xmvn-C!^+(AUhk=qQ41vFGn||xNF#E#mXFJ z7QG?><&_Kg17`>|{(>ExNxF69qDS%9^|G7?bwi&cRr4=SD%>0QJX3l9{+9=oNhOpz zG1^7evs*zIx9P&5rE&8cm97*0aDU&$m-r!fg(%U%r{@L5+o|J2fJgJvL-#9B1C$yP ze>{{}JKmb^5zL5)3;NJds(GUEz@@9`N;NIJmiCQ(^5nD#VA<+=Xu-gbG4%maa@qJC zbm{X$VqaNDbl@FN*6{VSAbk0lU;eLMwh#=&D5ELhe3KIZ6%9kXx4-*m zszoI+c&7*kar)c|zISuX&?rrAKma2fgu9W6u5+GP2(0za3A7ArIN76bNlL 
z9om!{4;M2w(2ur3%L*FNvz?LSWk_p7NhuXzyXRTKnVroLeY%Cy%(g>rpXkMI_%Y9j zu%E5Soja+|cGz1kU@2bi(ftMx@qppB%DjM<>5|^%_3l~oCqu?DDs`jfl$nVqdRoS3 zo7EO`ZFsHc?KJAdd(J?v0Pkt1XAtY{vrf-Tr6)tv3$cm!O>Yeq?rSlc7ml=MCA(C7 zX1--KM=oF&t);6B_-y=z_0b(c&5uCW=IBm2-g^8SZv1y|T{I%bidP~BH9un_Tv zU+FxGbxoH0%9w(8HkQX?56BO01ly~)+9v?_XAVfl8ktTBgYgiLD?IgXL;FcrL{^`U8sno{&wjuN-RS6 z^RRRb^$NQDG>C0O99f2alJH&Ye$;mjWq%@O=7;@vhhog3XOO!aB_~l#8(q_ycm>pA z!)af@T@NTNj$sJNjdze#9V`@^sjdj&OR|1z6>*UR+F{nQ!V2$>ll7}c@(%{^dSRrT z7=JLXqU%GE59gv-wL+I$Dr(HezK&1US0!9CXpO3UB>$Gjp6Eiy%(*>_^x0DhgRhGN z$g*+yUT&L!etv{Av4$hYirY9cD$BWT@QQJ=Kr@`u%lfW4RBgNBRhE%xabHEnb-&9k zylPX?4EkrgrZ3BNC`8+PYbNLC95~2(TWsM_h2C$>Z1U{gl2KjVn?COkn@;Lo`dcin zY%i71Q6tV3#XCD5)I{yq(NDZvdT!P(5PROaV)Se%MK%plpQINZy`MUU6v9%bAysJF zBCEtUtf1^BbSvRl*OqFHDOXg7Q3aFA_p)M+LsF+rW=C<8OKB9nxwOh@WLaH3zT6WV zxU<1#-gCy3Jrk6ArKFCuhY!*F3$1`DR7-X}m7B+$XJhutZ~* zf9lmv&Cn(om$5h;Th|bq#z97>#T1q72K^WcDw_(q$Xg%``Op^cuj*oZlO4@Qsnn6* z=bfFV&Iie!2hJ@Nfq}CrL5%-R4r*~*_9+{$pb{s+vH1q28N;^ znAt_yu5aAmf7Kt_KtJqIp6`pIT1hlGo_Hmb&xCdpPu%>-Y};sdTlEa}LL}8!CDwtc zqD(utSLYmZ#k;=+^rVOEp|^mVf6c;0f&!_V`ScPhr_!T3q2e% z{JO&gMxr&!UUS>2HT9k^cF(t$wfU=^48y(HU~8#_d-E5^mQHtO$UFGd+jT5b^8F;X z9qaQ=?2-{L$`sY~%HCR6)YlF@Ot5*Roo>$c=IsLp;KIUF*bQB;;Mx8cIODt>MtyV^suX*P4u?#~;#^NO5#pnsmeUD00LViFHZ-D^9Drb=0S!zbo|N}FE5 z?>6t-Z}>LB&NT>P`?`&W-AxOjjT|1Cv?076`SuEdus2cf{wN_c_+y@tBniTsqg|ia z=nrd8M31!Ncv%AYQf^cio4!NhRNZt+=ARvjwWW$UhR0UYNio)v_l;NWRo4k+nAv-n z^wcOJ#EY${P0_1#cA;}q{3o5~-%aHC1}aOZ&O$Knz@iT5LZdSx4;c9>7Zdv++kKPx_m=M}GJm z1ZR6?s?`m;`*Qkg-Kh&tuPUvH)L_a{r7B6IKnf9nP|G2u_R~ZHK-nJ+dJy*UMfd}_ zBLc+J?eR-bPt+1C7sr?Oa@Y_hxP-VBB{_fg`S_-dxefJ|LZ=v>^&_pTUG3eAr9U}llAY6>r`7c+lJ34B# z$(QDBV9a}8YcgG#IDscf>oCTr@~`PTLucxPy6 zQs<8hQkjSlzv>Y6tNd@%RRHVtUvCW%iR_brAejeiJ6`GXcNqAoHu?3_||P z4t&43B5vZ&U`<^~!+~(E+9dUt&h^ZClZ*%eyl4ICDc+Lqd{CE6QJ8Rpfy#JMq*L3A zQ_E85<6_1+kBM&?pHZ_HYQW=D5WWot4lzvdrW@{HTtFLKPB*~(H{sLwC)CM`y=W0? 
zJR>#oXeSJ0hUq|BQw_(<3fo%~-v8NEt-4m_;h*FY{By(g>urb0}+5Ac0jEM=f_ja1>CS3gp)o zitkUzrF>J$3|N0Nz_0bW`}~9dEtBbfUyJMarePcxwba#@ylwjWT+`z4Bifh^Bqw;f6(xbI zmlcW&(_D4Idx0B49a*Gy#0Q>&r2{s|>v%Kh`#E?Id{zV#hC8t(0Ed$x19b7{PJ=~4m&ZY}t6`G> z98s0ydytd|L9{@zl;l!0GZJYkn$`m%Bc43KcWJ8u`w`KHDGHxfbCiJ;}I<4oDYkV zhJ?W>>$K(+FHtNHyPe$Rz2uN)!r187KFw=iP9*NB{wn0!%tggKcjKJ2TWbkm57vsto4Ys&Noka~Rz}}A@a>$1U;m!5bF-}5 zA&Dl0)HX<)Pi}Lv-d_R>I?y1WtwCS{UD zV|zW)_)oNOJU+;8y9fOxnhP6!p6l|?*trIY?e*)NYJ7e9Qu%!O-l#_;ud*7ONN@rk z6Nt??wMz|j;iWwWhS&8k=Lz*~C7bdVo(7nde(Ak3@~ntH>(Qb=EcQSv#Lqm-Ye>`d zl>dk0!@g0A7*4o*Vh(h{U=e`}5rwX0!dEJhDX1sxMUD!F$gZ4X<4#A8TaomyF2)N2 z8*MnlK^gcC(`N!u6h#0srs^XTCDZH^+L?7_;%hv{)$Eaa1KrM&tpicUB3EyU)}G$v0EHq8`2h1Oo%P=F1i4Mb>SQb2zT;9Zh|V;VUU9P5oCZ$C|dRv+3`| zWtn!_9NT8!yLIdrCD-ZArRB~!K;~tFU1#BR->`8T3KDrz6^6;dCRZ<1mCC)M@8Es? z8c@BG+b>>LleLEB2Sb`ZDk!bitmCwPCuio#I@iWacWLstc%yPdKfgX zP${Wk8b&<2z zJWJz9qbLE&VL#2_$k+aAyiw-ZGorVAfj5k?}U$gp}fzWxc z*f&*dk$Vecg;<$+v+c2#TtATPj&b0>WcpjLvAFi0_hfN+ z)$Q6KZ58qw&L~^Vb#k72m?&Mv%J!3k#kSo!-h_jJpoGz#vxV`^@^ltod>$PZCIeZ2 z3j6C0!skc8(`62R%CYYesIc3J+M&kupTcGFug4AT%?yL#3#{{qW4zPp#fqYagKJMp z(?Cl4-atwFMRux|XK)hp@4@A^QZ*8%eCLh0Y_h<>>ixuQ#EBM+bWd)&`=^Jg;E&(B@mCOWS1z z!U+dy=U%BW{urBbi;@0*J>oHMe2?DpcvYg?ql$Y&X3;_}YpA1oINjGR4VPV6>!MVj z0MET4)&8s%o;g^x?HD}sRMxC-zBwb8xb=2N>6jyro2{b+{rHiE>!#RM!E+%lMuHur#AGT^ zZviprY|5$F>Hy>50lplcv|5NX;!k&fbg02RQgCTWKT)WH2Z0a(kmS72uJj2zT-B`{ zl{)bqSF;s!q}MoCBI&62bv95=x(&PBIF=?iQh3ScVMXzfT3USXE@w3--bgYymX5>= z>a{UX-EU>3FvjAn;p{jXUxM2%4gKaD8BKABHy32L(jWllIsX)Y95p}0MUKJtm&x&A z(l$S!YvpvuSje&P^_~U-5SzS)ubTlQElWiON;`X}I_=?TB~&K=r_Kw}GOhzR1ellL z3}sgCb5TlN-yUGoCd&$yc^sTdTkd?iIZb?3!5hE#!$<~t)}10i$jol6!lWfmY+}&F z&I9os@rb2Ol98S3jRhII$YN0E>yOTd{_h?>6!zQ6Nr?J*Jg(_9MF5-^@0CUtWcuq4 z^&>m>P?T77dmCjZ*MYex zmTM_mJOfryaJl9#t0+T7DUX8-!@-019LeLeeT4@);z^lcuvq@Fr?2x9DQD3e1ybWL zGmIl%ert8SX2y> zW8#C*QvCOW)}4T%1BItfSNo?+yT3<`myWZyD_$$|okfQ1pCy)G9y#}D;APpk-m-Vf z`bHyt^>V1?&Xui>{@aC4v*x$34KP+sD~<`)opX0+vtB4a-R-t^N;3rS;Z4@LNXdfx 
z*Q#14J+IG9yj>{pVK&=F;a*)=JN0fR03WE*4gElOf2l7P%yD=I`h4PQFrGlSgwo3h z!vtvUN#}ghF(dz|6!FB?#fE5D*D>n@t>#=JE`l82!~w@AG@J8-49JCXt>Xp6F-4Kn z-!D3XS2yEyaT(9rPdDMSG0T^F|D8z}RkU;Rug1qsy#*3R>BObQ$Hm#+6U7H=!>PNhP0RLD(;FZpSS!t`zDAg?FO5aZOtg6Af-5adK|{7 zfgYVPy;I_SRh(KCD_|cPnY-}Fu)BdAbAPKzo{quJ;91Jh+^LgEP}7uJN4#l_UK*#@ zsd9pZ==o>)HX7~xl>uDi-P_62_XnlA`-S!_GGM62zpvERjCx0(O`>>&0U?QmoBuwSBY`oOtR0w z5&-qj0?QG*08K56OuHF=6REPHcc3LDCvXf4}-?52Fbx@Vui!9Vh%z<&NR<^ zbOB1$#v@ajZ1VREYq0A-d=jc>dj(q4c!zM-2VP}h(z3aUM40aAWZ`B{WnL&{(%Vm0 zp(!V&oNZ@Tc4C5SDyrw{7*%SQPi6vPF}5(a zztSmnN1CqW^pjD7Duae#x#tV@feld}dCzl!nTuThHY4xE{)iC#c=_MT7RYhj%qT8T zUh?=(BRDAUBgihW!z}#_dS1#`(dr9h;l@WqIzmJ&tp>-NzXs?UbCpSwmy?;O_o)rNmj;C zpmQsjDk76ec@LmkZ_{h?mAl_`&A6ga#I1A%*LR2Sj$zqbQA4r8dl-~yQGQ2qgdmc4 z|1YfA??~QU#cImNrQcf$V^(0P%a-3_`uf8&PuedAWeb@iCA>uikxA*%OY9eur4xjh zGTTwl>QOiRmAO(G-^RTtOx?N4LLL z&(ji|kJxv@s%@f{4!{8;l@k$z$$ zFYpf6ocyG4)}GT#?7^VBy!vKg%d;E?;yB`L8=W9EyIkjOz1hW)=x}RjVBHnzw9wYQ zd~YdVj~Fe>#_;F2EiW0JIl9turk)zlaDqHK1vw_7egeR%hT}Pe?Vix#Nf3DP!m-Gd z_{p{1&D$U6-WYUi#i~Zs699_SF&rK|5d)VAz^lyPn`@3M2lT0M2p|61P0X*VaifXW z7j!a^@FpmDZ-9b;dFC2OdVwJ9MZg*!{t#rRz&n2Hz{Use|E(P%uVDnfXi<43>c`>L z9YvqSR;pL%lBy|o>VBj-Y!exQ&O&~2cHiLJVDb~j?N8#bg|UV;&VNV`|MV%*?V%|p z)mlPUJ(qy09Xd+HaJ+ z;0~Ber&$QSOf)(>q|98S>?!<-^b~^TQ#+^gf}`Hzyruty|G>%IuMy0GYhswe>cM!JV3bEAlGH>i;DA z{*M()A+Y=KV=S2IqFQgBBU%`hRTYVPH=gvpI5NSqJtjAIg(PrWxNX!zim})z`oqnV zed+_wr^yp1s#!7nchu@Af94P)ww-W6IB++?z+lWrnUU|c4!ZvNwU=!T91dW|x8Ckjw>I>;ljUtm?xAXv4%Wk~XZ!Y=((dhlSuo_y6s(1=rLWu;hq8Je77~CfD7#_Q zG6=L#KNb0*G?EMN2N}bFbKq;tBbrbyFJI7=ABGj|pGt1Qif`~uy(L?4a(B!om5Mzx z5x8lg4)O9CzK)10>t)rhDj$CA0PU)j)s&i{Sc>#N4O4hKFEX~oM#>Q*=@&&;ZU@`# z0bde&0j~Fb%U+}eb4OJKe^iB}##0wsqK%7O6nV#vxu;p#xAE2;0jorwAu7DH|voPdQn8SFm1~Y5SxqsfpJn zlkZ(xAk{7%*iqS7P{TEtJxjp(26%;SNlA)t*3Lp2{WF0`Yd%Vx8aoM($QrXk!y^91 zzKI3gmjGNHgciF)k176X#NR#)pQu1=nEWlX6DLp|O8J`oTkxgFVB+X1e2=jVaSw{|PBo`E?3Lpvy14C(8W9t1P-e=kR*<8@%~O;_a2cih^d#Me+Te^V zv{u;abhCH*VwB8oTWk9b^ZU%^PQ#ua#l3W)0d(B_!7Wxz$!r)JGPIcK$#qD1n_00- 
z$;Ol{tIIR*iSMg+{`Ox2*fHXXzMt&yT!ZfBG6&AC?$*9mOkevixAD3?zL>HIdlwX- z^TTvl_s2uW&7M<|X0&05+be&OTSKCI{^H-5gW9z!)i1nc)S!AvjO0xt_xYL;;k(*e z@e+)p-TYfF@2`2i{bPF~TIXyIO&;&mnoNj~?JU@g2T@#n6GU<0e9Y~y8F`X#c(Yia zgul_Bo9A=uW5(023~iW3X|;I14mGe5FJJ4l^rKgPY)ip&;hB9+Mg+RzQn|0MakXjn zTTw+})&rf}r$G^dMXt~OEs*>F!v9G66*$2~h5}*3*e?9X*>aD&yF%Ov^LZ(9s5_hKlXJH&QXw7B(KTJ`Tf!oSmBv zqq4R)o-4dNEIOciEIw|uArksS8UbJ%v>5DI4s0`^VNT`sP}8^_-_C?h{^0c{^;?%1 zBPl8;cz0R19E93dS54b?=kdx@ytutBy!`!7m^zSAI)f2d&ISPyx_R|oO0F?%RB>y% z=FUdKLsk2?l@0;jYxa9PO1!7#N^EOTx(063-e*C+lyzzn@woyK?n05TD;9VE3K zW;YU}`tCdk(O!^fNi?|YEt&YBxPAA6#XSO`x3`D<3dah1M(_;>oLwoFNi7%;8Jl$& zdopATCWltDosP$+1*F=~N{}^Y+MDdjnCx#Be?;GX5j}tJb7ejJ@@Fb4#g+>a0%BFM z&l~&C$E9C)N`NJwp1o+o%qj)vw{EX}FdcPzu$LiGFK{yb)v$IAQhM(lg2B&f#-F<7 z7)*krqxm6g@}{^lY&J@KiZ?2bLdINuz3&lqTrlY~PU*$YnX{g%!{z4kEfy@8Cowr_ z+c-PR;g#A?6jj6;u?vx{BbmkbROFG%Wp8FuB~A=i>u9_KYlwTbxU}1@)QWCTV*Q=i8ivsujf<{d zhZbJDX1rytblwKvS*Zx%g41iwI59SU$QH}f_|#F6xisFBy?8R=zSio1dw?~}^ApB+ zRA;r@=h*w)i#zz*UD*D3(exg?nZ{o7Mu2G(5nUB=}*qY2*DCkhG24ngJ? zV15W1X7NV=u+$v?MkCg?lDC!rwazzj!UFCgq}jZQypq9^xg6 zxK0h^&bW=Snzsha5v;32wk*d5324W6ZCeM;`9XyW49Nl3idp zCCvT!Ro$0Q6XtIxTy?+5PRXLy_idRvKB97adbXhyL(;~f8Y`ZZXM5d$sgOIyIbY+E z1*gWuycOz2AZ7hn`ZQ$4(Ey+ejz#*gnT;;xuIUT0rg1+7rYP2ZRZ_pbb5s2Vuir1;>vv z)X^Cj z|D<-r(pPHq(C>zcm{Nt`w)@ly+|)0U|4QDD*wzJUO%}C32taWJ-6?+!vd^DoPX4ZC zo44sBoqo>R2k(I|@vuzDwl4DMk_qHAQXBq5dL+O>n|#P5tPOQm1+z9!7g4t1VI=B~ zqJ@}(f{bp%r?{4?chlSzfw;lR&z2@7e*dva+n^r3}7gT(53Qo4Qz14ajRz*Vq z?i)va5x{Y?1~z=Vxfyj9cf;Y~oQUtMNwr3D^(cu%gA{%HHln)F{^Rq{GKtSU;Dvj+ z+(9rX3FDyHbO#LN2De1mt;&&da{--_5ecc2%FsuleR5Bfi(C4*Zdb?(8njOVOAVT! z^n)1B_N}lf!77YHRA_E`xB$e z5;E2}(YM84$DCbCak4rst4vHxR;h3Q?h#EB+kPqOv-A7QlW|^1RjWec%P7$VblM(paiu`dGBJSW64bhrLiP!B6R zzq)=R326>_5s{<%5pe`?h>Vz~ue&nu^n&N^=h3;C`$K+uxvP{`&Q90s7wcO-wDaEK z5xh*vtKKm!^hMzbWzhr)daLacigzC2|(ra>7}$lsHvD~mQ5?V zv{3E#z^ih=$YK~X`mR2=DG>6%x>3!@Bb5}r=)U+Y)F@RW3)8X~^EHUa?+Ph5^YAeQ z^3prMpNTQqTHEbEC?EiFM{_YJAk8#rWWR=m38bgaKMrfr$jPa;wckwCKS2n!B$Yo! 
zP;%^DN^#LN9e;8x{q49L&rv#SeR>vhqy-W_^ZM{+13yvlJBq;lE_4XP*`X9xki*b9 z$Rd1qCwm%#SH?}B`t+cdCzO7`chpv_anjaJ{l^^89RxUVTMn4yTYxnt05{d}dYdTh zNPzc%8tx_zi4V{}nFo_u$dC7v;QdwpgJ1+S=4(IJu0I5^Ck0 z()`&*kU!~x?7uDuX12lKf|){H2tW7{0ES>nC5{g1YGAn7WA;B;=}&sV(%e8h(h~p? z&{S`gz;MA15x)KjY!}StL>=+$UHy}l_)r@f+YPI8a(b=8+^STcG)3N+#YY>p0o)67 zDESr8)L>ttKXiXi{w+9Y!qNACnff(pCLLHHqWb4+vfMD|le>OWkHgHJQre~IfKG9Yx{xomJZ%adBwps;YY3F2R~?qFT!Gu5QDFp?_{7y{yM=*$DnOK zV8L0sDT`M(2bTbJHb;pa{uN|&;#k4e*g%#Mvk|GbgKWn&<1ZG0eM9a1G2mLFYe4;8 z6PyvcP}ts8_(I6fwSjzYlX+srfXfFfUj~h9{|4TV{%Kp1E#&Vt!BrGIn|MkyeRT2X ziq4-g5yXFMfeYi{J@|ZY-qI_?)3jZ zIzn&N31QkBx#dQA2Hnivounz5lP=iXW{5I9nyCTQf%n#OXKI578z_Fh46sm*wZ~7P z_7Y4^=IYna@O$Omb2XVl`^9ur)$%kiuwfrQ8;pMylvr;+SuGy!v95AqF7HmcdmbfY zJ3=Pu}@*Aq!qz9UlU_IvSrhGc>NKZ&@fxtSfFmgCsVB z+o39J>*w2>S-fG=2AWR~KG3Datl!**<3os{n*!b-0hbG%eHlOphNv^(+txq-k-su; zzj0S4Y%oyC6`baaSdfzVBOFZQNL>bd%EYsUfuQ`9N`L^I@kHQ!8eW}nL99N|rT)2} zpKIh#jgcAncC~LSB+FV@OGq#XJOvRr_9-}K_0MhdS5URWZODNpvCh_0=FXpZ%=zgM5a@4Q0B@+qG;tnK2H=Dy@K` z?b>K^+2RNCf5|NQTju>g`YNJ-Rc5%1*Z_SG-)<}90Q4sxKaZ_E4)Dg0Aa-C1;8Ed- zI@UZJkTeUvR2d}4rQ%_&{X(Xq-0|grVT2FSmwGW zMQ5ra9*U5N>If=Z32M0Cc{>yOrHH4oK@!oJXc8G~gtoh4$D(U{$a0uwqZ;0o&=@uF z{7CI$z1_q#W4XGi`dw~L7%ke?5b97~Hgvy>Ix=|q+S}B{rvTKehdAm3Q7r6Qc$1~| z%hcOKLN>z)O))*wvJ;0fw-`8GMEboK5ARQk+qaER8q2R&do~=KKI!d-yf}7xS%T-r zC?tPmIWDY7^5$zxI&kIa7f3vhd4BcirMd!Rq}+U(xW=}qovWD%)~N+Dem$h3ocopY z+1_B24D}O?a8=wN1Hqijiv~m*~)~!+;h6ft3%|~&? z47r^YA^MIAZmqu12=r76f410%ipsPTgIC73_u1YpN2R_IH;PJ%Enzz!={hENBKrN) ztnOs|;tonWG2g6WaPnT5p|HWd)Wce{$!vQI^P_Y1$H$2{|LxG_MkUDIhmS|NhfZ#k zEps$P@u%=yzPrY2*LO1i0J3zXF4DDY;cJ?933E1~rgZ}+o3-Ln7OED1&-cBI>&|+D zwTf99ao&9PJrUaa0vczAj0Aqe#0*%x4F)sV+*RsjgH|fU+k$~$k-C|Grs<%@IT;an zDQMBps3~1HhhPRu5?cGmKfOjoT_cV-{U7aIYgAKL7EYywj!+*XU{PsWXhoo?4IryP z6GRQKYKS6e0Yj@0m=JLkEdj#C7ob%hg-8L1D~~pj;i1E87*+`*V0o0rK+#FCMl6zo z@(4&s!p+U>uynPgGi!CtzggwyJ!{|G?CC2o3Vt4GwVwyBAPJg z@cc~ICvhth&K{T=DY!PedbBXUVUL>g~pe;m0d<&nz|hSTJpo55z@P7EVGS=zCD6fB&M>yTI0W=u~zt? 
z8EKUilbu((^M>43B)mOP!}`FY0s+f0+O@z&VcSo-RGI) z%Hl$g4fW>qLmcJl=W4*DBBcjtS-7XOju1!D1UPZ0SPu~if(P)^ka3t({32^wc5~Bc z_~aIbnAn^04$}D`Uz^0x1l`^$kRT3Mva6E#!yQqXWzpOU$wx<}gzLledpWj!)v@Ig zFAL9{QJ>skrMMy0f#bq)jbpEmUdGS;k4@vx&20PfyOe(ojfS_+k-wkKlJW-w8d6`s zo>k>rn&;%6y8Ku*q``QwOS@T9h|D(IL)zOKhF?aT(*2BD`Lzm9n zC_l1lg8%-G@iUH3|KxbTYvvL)AS2*FT>L&|0q-0fa zMIwqSl5!Dw=BOh)_6DMtDb^9p_zD_+9>D~#x2ce7%}QI^a2sP-JSC567j#FijzB*y zQ>!UJ%`d0{@5OIWhVpQGnEqk2kY~!kJs2fvasfgekAv*lj+Z_b&P*dfAJ@!? za4Xs)RJ$BXP6F#eLDeB`_Sin4y9^z{!BU8w67JYkw&?q3wYz!{`7o}h=5}%vBcUA> zjRicEz%29)$eFZ#Dn2j_As*y}sK!&$=sws^vwoc0zaV@}G@rrENTMz+P}yPHZvr4o zm0`ul=J^ZCXXWf#C-5Y9L64!u$W%9C4)t9jAfwKM%hqKE4%KU{Ku4`|rm5#>3-X6J zwmB4tx1C;-OGy%~khOrUxNMEO3f5l^A%HJMEc@GAqnvJ83@ zCUFo`UcD6g4-^nU*@&Pi4#QBr9KmJ`!6M6)G*yXPN8 zdUa#6sevg3@)m421Ts@cup5U!jI6oi5t*{ z%XNg_IyQg2j7@N6uaB9J9`rIBq)Jny+wS~#1F3z?U~~sz+f_@rF8JuW^ z>_XMT$qXGqbOGwaufzGWd@P0hGAgNnL}YDx@i?e{kc6y w)4y0-e7Cr5k@@i(OF!`fx+N@r4x=S3maur9LzeQz&-wKCg9W|&mRu+KH)f>7E&u=k literal 0 HcmV?d00001 From 4c1ff8548b848f5e642bafcdb6cdb0f7e8c3e03b Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Mon, 31 Jul 2023 13:46:00 +0300 Subject: [PATCH 07/32] Simplify Python linter workflows (#6577) Use pipx (preinstalled on GitHub-hosted runners), which takes care of virtual environments for us. Remove the apt-get commands; we don't actually need the packages that they're installing. --- .github/workflows/bandit.yml | 7 +------ .github/workflows/black.yml | 7 +------ .github/workflows/isort.yml | 7 +------ .github/workflows/pylint.yml | 12 +++++------- 4 files changed, 8 insertions(+), 25 deletions(-) diff --git a/.github/workflows/bandit.yml b/.github/workflows/bandit.yml index cab6e202e48..c20ca4fc609 100644 --- a/.github/workflows/bandit.yml +++ b/.github/workflows/bandit.yml @@ -18,16 +18,11 @@ jobs: CHANGED_FILES="${{steps.files.outputs.all_changed_files}}" if [[ ! 
-z $CHANGED_FILES ]]; then - sudo apt-get --no-install-recommends install -y build-essential curl python3-dev python3-pip python3-venv - python3 -m venv .env - . .env/bin/activate - pip install -U pip wheel setuptools - pip install bandit + pipx install bandit echo "Bandit version: "$(bandit --version | head -1) echo "The files will be checked: "$(echo $CHANGED_FILES) bandit -a file --ini .bandit $CHANGED_FILES - deactivate else echo "No files with the \"py\" extension found" fi diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 460dc102e04..5270e185edc 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -28,11 +28,7 @@ jobs: UPDATED_DIRS="${{steps.files.outputs.all_changed_files}}" if [[ ! -z $UPDATED_DIRS ]]; then - sudo apt-get --no-install-recommends install -y build-essential curl python3-dev python3-pip python3-venv - python3 -m venv .env - . .env/bin/activate - pip install -U pip wheel setuptools - pip install $(egrep "black.*" ./cvat-cli/requirements/development.txt) + pipx install $(egrep "black.*" ./cvat-cli/requirements/development.txt) echo "Black version: "$(black --version) echo "The dirs will be checked: $UPDATED_DIRS" @@ -40,7 +36,6 @@ jobs: for DIR in $UPDATED_DIRS; do black --check --diff $DIR || EXIT_CODE=$(($? | $EXIT_CODE)) || true done - deactivate exit $EXIT_CODE else echo "No files with the \"py\" extension found" diff --git a/.github/workflows/isort.yml b/.github/workflows/isort.yml index f3157b446c7..b5c2b4921aa 100644 --- a/.github/workflows/isort.yml +++ b/.github/workflows/isort.yml @@ -25,11 +25,7 @@ jobs: UPDATED_DIRS="${{steps.files.outputs.all_changed_files}}" if [[ ! -z $UPDATED_DIRS ]]; then - sudo apt-get --no-install-recommends install -y build-essential curl python3-dev python3-pip python3-venv - python3 -m venv .env - . 
.env/bin/activate - pip install -U pip wheel setuptools - pip install $(egrep "isort.*" ./cvat-cli/requirements/development.txt) + pipx install $(egrep "isort.*" ./cvat-cli/requirements/development.txt) echo "isort version: $(isort --version-number)" echo "The dirs will be checked: $UPDATED_DIRS" @@ -37,7 +33,6 @@ jobs: for DIR in $UPDATED_DIRS; do isort --check $DIR || EXIT_CODE=$(($? | $EXIT_CODE)) || true done - deactivate exit $EXIT_CODE else echo "No files with the \"py\" extension found" diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index f54623bc298..b890f02517d 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -19,17 +19,15 @@ jobs: CHANGED_FILES="${{steps.files.outputs.all_changed_files}}" if [[ ! -z $CHANGED_FILES ]]; then - sudo apt-get --no-install-recommends install -y build-essential curl python3-dev python3-pip python3-venv - python3 -m venv .env - . .env/bin/activate - pip install -U pip wheel setuptools - pip install $(egrep "pylint.*==.*" ./cvat/requirements/development.txt) - pip install $(egrep "django==.*" ./cvat/requirements/base.txt) + pipx install $(egrep "^pylint==" ./cvat/requirements/development.txt) + + pipx inject pylint \ + $(egrep "^pylint-.+==" ./cvat/requirements/development.txt) \ + $(egrep "^django==" ./cvat/requirements/base.txt) echo "Pylint version: "$(pylint --version | head -1) echo "The files will be checked: "$(echo $CHANGED_FILES) pylint $CHANGED_FILES - deactivate else echo "No files with the \"py\" extension found" fi From c56f2e9a9370be9f09e2087feff17bc91ab9f0d7 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Mon, 31 Jul 2023 13:49:36 +0300 Subject: [PATCH 08/32] Fix accidental prebuilt PyAV usage in the Dockerfile (#6573) When I integrated pip-compile, I neglected to notice that in the generated `*.txt` files the --no-binary=av option is on its own line, and therefore is stripped away by the sed command. 
Hardcode this option in the Dockerfile to make sure we build PyAV from source. Also, add a workaround for the fact that PyAV is incompatible with the recently-released Cython 3. --- CHANGELOG.md | 3 ++- Dockerfile | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e33f54ff589..87143015a4e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed -- TDB +- Accidentally using prebuilt FFmpeg bundled in PyAV instead of the custom + build. ### Security diff --git a/Dockerfile b/Dockerfile index d62b85d4980..75d2f0bf497 100644 --- a/Dockerfile +++ b/Dockerfile @@ -56,8 +56,11 @@ COPY utils/dataset_manifest/requirements.txt /tmp/utils/dataset_manifest/require RUN grep -q '^av==' /tmp/utils/dataset_manifest/requirements.txt RUN sed -i '/^av==/!d' /tmp/utils/dataset_manifest/requirements.txt +# Work around https://github.com/PyAV-Org/PyAV/issues/1140 +RUN pip install setuptools wheel 'cython<3' + RUN --mount=type=cache,target=/root/.cache/pip/http \ - python3 -m pip wheel \ + python3 -m pip wheel --no-binary=av --no-build-isolation \ -r /tmp/utils/dataset_manifest/requirements.txt \ -w /tmp/wheelhouse From 1c0a49fc80e994a2075084dde4fcf9438d33d053 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Wed, 2 Aug 2023 20:30:05 +0300 Subject: [PATCH 09/32] Add auto-annotation support to SDK and CLI (#6483) Introduce a `cvat-sdk auto-annotate` command that downloads data for a task, then runs a function on the local computer on that data, and uploads resulting annotations back to the task. To support this functionality, add a new SDK module, `cvat_sdk.auto_annotation`, that contains an interface that the functions must follow, and a driver that applies a function to a task. This will let users easily annotate their tasks with custom DL models. 
--- .github/workflows/full.yml | 13 +- .github/workflows/main.yml | 2 +- CHANGELOG.md | 5 + cvat-cli/src/cvat_cli/__main__.py | 1 + cvat-cli/src/cvat_cli/cli.py | 33 +- cvat-cli/src/cvat_cli/parser.py | 35 + cvat-sdk/cvat_sdk/auto_annotation/__init__.py | 17 + cvat-sdk/cvat_sdk/auto_annotation/driver.py | 303 +++++++++ .../auto_annotation/functions/__init__.py | 0 .../auto_annotation/functions/yolov8n.py | 36 + .../cvat_sdk/auto_annotation/interface.py | 166 +++++ cvat-sdk/cvat_sdk/datasets/common.py | 3 + cvat-sdk/cvat_sdk/datasets/task_dataset.py | 7 +- .../openapi-generator/setup.mustache | 1 + tests/python/cli/example_function.py | 23 + tests/python/cli/test_cli.py | 26 + tests/python/sdk/test_auto_annotation.py | 629 ++++++++++++++++++ tests/python/sdk/test_datasets.py | 1 + 18 files changed, 1293 insertions(+), 8 deletions(-) create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/__init__.py create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/driver.py create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/functions/__init__.py create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/interface.py create mode 100644 tests/python/cli/example_function.py create mode 100644 tests/python/sdk/test_auto_annotation.py diff --git a/.github/workflows/full.yml b/.github/workflows/full.yml index d2f0a23a3c3..b90a8599c10 100644 --- a/.github/workflows/full.yml +++ b/.github/workflows/full.yml @@ -152,16 +152,19 @@ jobs: name: expected_schema path: cvat/schema-expected.yml - - name: Running REST API and SDK tests - id: run_tests + - name: Generate SDK run: | pip3 install -r cvat-sdk/gen/requirements.txt ./cvat-sdk/gen/generate.sh - pip3 install -r ./tests/python/requirements.txt - pip3 install -e ./cvat-sdk - pip3 install -e ./cvat-cli + - name: Install SDK + run: | + pip3 install -r ./tests/python/requirements.txt \ + -e './cvat-sdk[pytorch,ultralytics]' -e ./cvat-cli + - name: Running REST API and SDK 
tests + id: run_tests + run: | pytest tests/python/ - name: Creating a log file from cvat containers diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 023645d5238..b1a85b809fd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -160,7 +160,7 @@ jobs: - name: Install SDK run: | pip3 install -r ./tests/python/requirements.txt \ - -e './cvat-sdk[pytorch]' -e ./cvat-cli + -e './cvat-sdk[pytorch,ultralytics]' -e ./cvat-cli - name: Run REST API and SDK tests id: run_tests diff --git a/CHANGELOG.md b/CHANGELOG.md index 87143015a4e..4e2e25a1e77 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - \[SDK\] A `DeferredTqdmProgressReporter` class, which doesn't have glitchy output like `TqdmProgressReporter` in certain circumstances () +- \[SDK, CLI\] A `cvat_sdk.auto_annotation` module that provides + functionality for automatically annotating a task by running a + user-provided function on the local machine, and a corresponding CLI command + (`auto-annotate`) + () ### Changed diff --git a/cvat-cli/src/cvat_cli/__main__.py b/cvat-cli/src/cvat_cli/__main__.py index 673adf3e6ae..2448587245f 100755 --- a/cvat-cli/src/cvat_cli/__main__.py +++ b/cvat-cli/src/cvat_cli/__main__.py @@ -59,6 +59,7 @@ def main(args: List[str] = None): "upload": CLI.tasks_upload, "export": CLI.tasks_export, "import": CLI.tasks_import, + "auto-annotate": CLI.tasks_auto_annotate, } parser = make_cmdline_parser() parsed_args = parser.parse_args(args) diff --git a/cvat-cli/src/cvat_cli/cli.py b/cvat-cli/src/cvat_cli/cli.py index 1c480929801..d0417944aa6 100644 --- a/cvat-cli/src/cvat_cli/cli.py +++ b/cvat-cli/src/cvat_cli/cli.py @@ -4,9 +4,13 @@ from __future__ import annotations +import importlib +import importlib.util import json -from typing import Dict, List, Sequence, Tuple +from pathlib import Path +from typing import Dict, List, Optional, Sequence, Tuple 
+import cvat_sdk.auto_annotation as cvataa from cvat_sdk import Client, models from cvat_sdk.core.helpers import DeferredTqdmProgressReporter from cvat_sdk.core.proxies.tasks import ResourceType @@ -140,3 +144,30 @@ def tasks_import(self, filename: str, *, status_check_period: int = 2) -> None: status_check_period=status_check_period, pbar=DeferredTqdmProgressReporter(), ) + + def tasks_auto_annotate( + self, + task_id: int, + *, + function_module: Optional[str] = None, + function_file: Optional[Path] = None, + clear_existing: bool = False, + allow_unmatched_labels: bool = False, + ) -> None: + if function_module is not None: + function = importlib.import_module(function_module) + elif function_file is not None: + module_spec = importlib.util.spec_from_file_location("__cvat_function__", function_file) + function = importlib.util.module_from_spec(module_spec) + module_spec.loader.exec_module(function) + else: + assert False, "function identification arguments missing" + + cvataa.annotate_task( + self.client, + task_id, + function, + pbar=DeferredTqdmProgressReporter(), + clear_existing=clear_existing, + allow_unmatched_labels=allow_unmatched_labels, + ) diff --git a/cvat-cli/src/cvat_cli/parser.py b/cvat-cli/src/cvat_cli/parser.py index 32630baaad8..c1a7e6c3abd 100644 --- a/cvat-cli/src/cvat_cli/parser.py +++ b/cvat-cli/src/cvat_cli/parser.py @@ -10,6 +10,7 @@ import os import textwrap from distutils.util import strtobool +from pathlib import Path from cvat_sdk.core.proxies.tasks import ResourceType @@ -369,6 +370,40 @@ def make_cmdline_parser() -> argparse.ArgumentParser: help="time interval between checks if archive processing was finished, in seconds", ) + ####################################################################### + # Auto-annotate + ####################################################################### + auto_annotate_task_parser = task_subparser.add_parser( + "auto-annotate", + description="Automatically annotate a CVAT task by running a function 
on the local machine.", + ) + auto_annotate_task_parser.add_argument("task_id", type=int, help="task ID") + + function_group = auto_annotate_task_parser.add_mutually_exclusive_group(required=True) + + function_group.add_argument( + "--function-module", + metavar="MODULE", + help="qualified name of a module to use as the function", + ) + + function_group.add_argument( + "--function-file", + metavar="PATH", + type=Path, + help="path to a Python source file to use as the function", + ) + + auto_annotate_task_parser.add_argument( + "--clear-existing", action="store_true", help="Remove existing annotations from the task" + ) + + auto_annotate_task_parser.add_argument( + "--allow-unmatched-labels", + action="store_true", + help="Allow the function to declare labels not configured in the task", + ) + return parser diff --git a/cvat-sdk/cvat_sdk/auto_annotation/__init__.py b/cvat-sdk/cvat_sdk/auto_annotation/__init__.py new file mode 100644 index 00000000000..e5dbdf9fcc4 --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from .driver import BadFunctionError, annotate_task +from .interface import ( + DetectionFunction, + DetectionFunctionContext, + DetectionFunctionSpec, + keypoint, + keypoint_spec, + label_spec, + rectangle, + shape, + skeleton, + skeleton_label_spec, +) diff --git a/cvat-sdk/cvat_sdk/auto_annotation/driver.py b/cvat-sdk/cvat_sdk/auto_annotation/driver.py new file mode 100644 index 00000000000..8c1c71b46e8 --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/driver.py @@ -0,0 +1,303 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +import logging +from typing import List, Mapping, Optional, Sequence + +import attrs + +import cvat_sdk.models as models +from cvat_sdk.core import Client +from cvat_sdk.core.progress import NullProgressReporter, ProgressReporter +from cvat_sdk.datasets.task_dataset import TaskDataset 
+ +from .interface import DetectionFunction, DetectionFunctionContext, DetectionFunctionSpec + + +class BadFunctionError(Exception): + """ + An exception that signifies that an auto-detection function has violated some constraint + set by its interface. + """ + + +class _AnnotationMapper: + @attrs.frozen + class _MappedLabel: + id: int + sublabel_mapping: Mapping[int, Optional[int]] + expected_num_elements: int = 0 + + _label_mapping: Mapping[int, Optional[_MappedLabel]] + + def _build_mapped_label( + self, fun_label: models.ILabel, ds_labels_by_name: Mapping[str, models.ILabel] + ) -> Optional[_MappedLabel]: + if getattr(fun_label, "attributes", None): + raise BadFunctionError(f"label attributes are currently not supported") + + ds_label = ds_labels_by_name.get(fun_label.name) + if ds_label is None: + if not self._allow_unmatched_labels: + raise BadFunctionError(f"label {fun_label.name!r} is not in dataset") + + self._logger.info( + "label %r is not in dataset; any annotations using it will be ignored", + fun_label.name, + ) + return None + + sl_map = {} + + if getattr(fun_label, "sublabels", []): + fun_label_type = getattr(fun_label, "type", "any") + if fun_label_type != "skeleton": + raise BadFunctionError( + f"label {fun_label.name!r} with sublabels has type {fun_label_type!r} (should be 'skeleton')" + ) + + ds_sublabels_by_name = {ds_sl.name: ds_sl for ds_sl in ds_label.sublabels} + + for fun_sl in fun_label.sublabels: + if not hasattr(fun_sl, "id"): + raise BadFunctionError( + f"sublabel {fun_sl.name!r} of label {fun_label.name!r} has no ID" + ) + + if fun_sl.id in sl_map: + raise BadFunctionError( + f"sublabel {fun_sl.name!r} of label {fun_label.name!r} has same ID as another sublabel ({fun_sl.id})" + ) + + ds_sl = ds_sublabels_by_name.get(fun_sl.name) + if not ds_sl: + if not self._allow_unmatched_labels: + raise BadFunctionError( + f"sublabel {fun_sl.name!r} of label {fun_label.name!r} is not in dataset" + ) + + self._logger.info( + "sublabel %r of label 
%r is not in dataset; any annotations using it will be ignored", + fun_sl.name, + fun_label.name, + ) + sl_map[fun_sl.id] = None + continue + + sl_map[fun_sl.id] = ds_sl.id + + return self._MappedLabel( + ds_label.id, sublabel_mapping=sl_map, expected_num_elements=len(ds_label.sublabels) + ) + + def __init__( + self, + logger: logging.Logger, + fun_labels: Sequence[models.ILabel], + ds_labels: Sequence[models.ILabel], + *, + allow_unmatched_labels: bool, + ) -> None: + self._logger = logger + self._allow_unmatched_labels = allow_unmatched_labels + + ds_labels_by_name = {ds_label.name: ds_label for ds_label in ds_labels} + + self._label_mapping = {} + + for fun_label in fun_labels: + if not hasattr(fun_label, "id"): + raise BadFunctionError(f"label {fun_label.name!r} has no ID") + + if fun_label.id in self._label_mapping: + raise BadFunctionError( + f"label {fun_label.name} has same ID as another label ({fun_label.id})" + ) + + self._label_mapping[fun_label.id] = self._build_mapped_label( + fun_label, ds_labels_by_name + ) + + def validate_and_remap(self, shapes: List[models.LabeledShapeRequest], ds_frame: int) -> None: + new_shapes = [] + + for shape in shapes: + if hasattr(shape, "id"): + raise BadFunctionError("function output shape with preset id") + + if hasattr(shape, "source"): + raise BadFunctionError("function output shape with preset source") + shape.source = "auto" + + if shape.frame != 0: + raise BadFunctionError( + f"function output shape with unexpected frame number ({shape.frame})" + ) + + shape.frame = ds_frame + + try: + mapped_label = self._label_mapping[shape.label_id] + except KeyError: + raise BadFunctionError( + f"function output shape with unknown label ID ({shape.label_id})" + ) + + if not mapped_label: + continue + + shape.label_id = mapped_label.id + + if getattr(shape, "attributes", None): + raise BadFunctionError( + "function output shape with attributes, which is not yet supported" + ) + + new_shapes.append(shape) + + if shape.type.value 
== "skeleton": + new_elements = [] + seen_sl_ids = set() + + for element in shape.elements: + if hasattr(element, "id"): + raise BadFunctionError("function output shape element with preset id") + + if hasattr(element, "source"): + raise BadFunctionError("function output shape element with preset source") + element.source = "auto" + + if element.frame != 0: + raise BadFunctionError( + f"function output shape element with unexpected frame number ({element.frame})" + ) + + element.frame = ds_frame + + if element.type.value != "points": + raise BadFunctionError( + f"function output skeleton with element type other than 'points' ({element.type.value})" + ) + + try: + mapped_sl_id = mapped_label.sublabel_mapping[element.label_id] + except KeyError: + raise BadFunctionError( + f"function output shape with unknown sublabel ID ({element.label_id})" + ) + + if not mapped_sl_id: + continue + + if mapped_sl_id in seen_sl_ids: + raise BadFunctionError( + "function output skeleton with multiple elements with same sublabel" + ) + + element.label_id = mapped_sl_id + + seen_sl_ids.add(mapped_sl_id) + + new_elements.append(element) + + if len(new_elements) != mapped_label.expected_num_elements: + # new_elements could only be shorter than expected, + # because the reverse would imply that there are more distinct sublabel IDs + # than are actually defined in the dataset. 
+ assert len(new_elements) < mapped_label.expected_num_elements + + raise BadFunctionError( + f"function output skeleton with fewer elements than expected ({len(new_elements)} vs {mapped_label.expected_num_elements})" + ) + + shape.elements[:] = new_elements + else: + if getattr(shape, "elements", None): + raise BadFunctionError("function output non-skeleton shape with elements") + + shapes[:] = new_shapes + + +@attrs.frozen +class _DetectionFunctionContextImpl(DetectionFunctionContext): + frame_name: str + + +def annotate_task( + client: Client, + task_id: int, + function: DetectionFunction, + *, + pbar: Optional[ProgressReporter] = None, + clear_existing: bool = False, + allow_unmatched_labels: bool = False, +) -> None: + """ + Downloads data for the task with the given ID, applies the given function to it + and uploads the resulting annotations back to the task. + + Only tasks with 2D image (not video) data are supported at the moment. + + client is used to make all requests to the CVAT server. + + Currently, the only type of auto-annotation function supported is the detection function. + A function of this type is applied independently to each image in the task. + The resulting annotations are then combined and modified as follows: + + * The label IDs are replaced with the IDs of the corresponding labels in the task. + * The frame numbers are replaced with the frame number of the image. + * The sources are set to "auto". + + See the documentation for DetectionFunction for more details. + + If the function is found to violate any constraints set in its interface, BadFunctionError + is raised. + + pbar, if supplied, is used to report progress information. + + If clear_existing is true, any annotations already existing in the tesk are removed. + Otherwise, they are kept, and the new annotations are added to them. 
+ + The allow_unmatched_labels parameter controls the behavior in the case when a detection + function declares a label in its spec that has no corresponding label in the task. + If it's set to true, then such labels are allowed, and any annotations returned by the + function that refer to this label are ignored. Otherwise, BadFunctionError is raised. + """ + + if pbar is None: + pbar = NullProgressReporter() + + dataset = TaskDataset(client, task_id) + + assert isinstance(function.spec, DetectionFunctionSpec) + + mapper = _AnnotationMapper( + client.logger, + function.spec.labels, + dataset.labels, + allow_unmatched_labels=allow_unmatched_labels, + ) + + shapes = [] + + with pbar.task(total=len(dataset.samples), unit="samples"): + for sample in pbar.iter(dataset.samples): + frame_shapes = function.detect( + _DetectionFunctionContextImpl(sample.frame_name), sample.media.load_image() + ) + mapper.validate_and_remap(frame_shapes, sample.frame_index) + shapes.extend(frame_shapes) + + client.logger.info("Uploading annotations to task %d", task_id) + + if clear_existing: + client.tasks.api.update_annotations( + task_id, task_annotations_update_request=models.LabeledDataRequest(shapes=shapes) + ) + else: + client.tasks.api.partial_update_annotations( + "create", + task_id, + patched_labeled_data_request=models.PatchedLabeledDataRequest(shapes=shapes), + ) diff --git a/cvat-sdk/cvat_sdk/auto_annotation/functions/__init__.py b/cvat-sdk/cvat_sdk/auto_annotation/functions/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py b/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py new file mode 100644 index 00000000000..325f6036a63 --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py @@ -0,0 +1,36 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +""" +An auto-annotation detection function powered by the YOLOv8n model. +Outputs rectangles. 
+""" + +from typing import Iterator, List + +import PIL.Image +from ultralytics import YOLO +from ultralytics.engine.results import Results + +import cvat_sdk.auto_annotation as cvataa +import cvat_sdk.models as models + +_model = YOLO("yolov8n.pt") + +spec = cvataa.DetectionFunctionSpec( + labels=[cvataa.label_spec(name, id) for id, name in _model.names.items()], +) + + +def _yolo_to_cvat(results: List[Results]) -> Iterator[models.LabeledShapeRequest]: + for result in results: + for box, label in zip(result.boxes.xyxy, result.boxes.cls): + yield cvataa.rectangle( + label_id=int(label.item()), + points=[p.item() for p in box], + ) + + +def detect(context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + return list(_yolo_to_cvat(_model.predict(source=image, verbose=False))) diff --git a/cvat-sdk/cvat_sdk/auto_annotation/interface.py b/cvat-sdk/cvat_sdk/auto_annotation/interface.py new file mode 100644 index 00000000000..160d12533d6 --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/interface.py @@ -0,0 +1,166 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +import abc +from typing import List, Sequence + +import attrs +import PIL.Image +from typing_extensions import Protocol + +import cvat_sdk.models as models + + +@attrs.frozen(kw_only=True) +class DetectionFunctionSpec: + """ + Static information about an auto-annotation detection function. + """ + + labels: Sequence[models.PatchedLabelRequest] + """ + Information about labels that the function supports. + + The members of the sequence must follow the same constraints as if they were being + used to create a CVAT project, and the following additional constraints: + + * The id attribute must be set to a distinct integer. + + * The id attribute of any sublabels must be set to an integer, distinct between all + sublabels of the same parent label. + + * There must not be any attributes (attribute support may be added in a future version). 
+ + It's recommented to use the helper factory functions (label_spec, skeleton_label_spec, + keypoint_spec) to create the label objects, as they are more concise than the model + constructors and help to follow some of the constraints. + """ + + +class DetectionFunctionContext(metaclass=abc.ABCMeta): + """ + Information that is supplied to an auto-annotation detection function. + """ + + @property + @abc.abstractmethod + def frame_name(self) -> str: + """ + The file name of the frame that the current image corresponds to in + the dataset. + """ + ... + + +class DetectionFunction(Protocol): + """ + The interface that an auto-annotation detection function must implement. + + A detection function is supposed to accept an image and return a list of shapes + describing objects in that image. + + Since the same function could be used with multiple datasets, it needs some way + to refer to labels without using dataset-specific label IDs. The way this is + accomplished is that the function declares its own labels via the spec attribute, + and then refers to those labels in the returned annotations. The caller then matches + up the labels from the function's spec with the labels in the actual dataset, and + replaces the label IDs in the returned annotations with IDs of the corresponding + labels in the dataset. + + The matching of labels between the function and the dataset is done by name. + Therefore, a function can be used with a dataset if they have (at least some) labels + that have the same name. + """ + + @property + def spec(self) -> DetectionFunctionSpec: + """Returns the function's spec.""" + ... + + def detect( + self, context: DetectionFunctionContext, image: PIL.Image.Image + ) -> List[models.LabeledShapeRequest]: + """ + Detects objects on the supplied image and returns the results. + + The supplied context will contain information about the current image. 
+ + The returned LabeledShapeRequest objects must follow general constraints + imposed by the data model (such as the number of points in a shape), + as well as the following additional constraints: + + * The id attribute must not be set. + + * The source attribute must not be set. + + * The frame_id attribute must be set to 0. + + * The label_id attribute must equal one of the label IDs + in the function spec. + + * There must not be any attributes (attribute support may be added in a + future version). + + * The above constraints also apply to each sub-shape (element of a shape), + except that the label_id of a sub-shape must equal one of the sublabel IDs + of the label of its parent shape. + + It's recommented to use the helper factory functions (shape, rectangle, skeleton, + keypoint) to create the shape objects, as they are more concise than the model + constructors and help to follow some of the constraints. + + The function must not retain any references to the returned objects, + so that the caller may freely modify them. + """ + ... 
+ + +# spec factories + + +# pylint: disable-next=redefined-builtin +def label_spec(name: str, id: int, **kwargs) -> models.PatchedLabelRequest: + """Helper factory function for PatchedLabelRequest.""" + return models.PatchedLabelRequest(name=name, id=id, **kwargs) + + +# pylint: disable-next=redefined-builtin +def skeleton_label_spec( + name: str, id: int, sublabels: Sequence[models.SublabelRequest], **kwargs +) -> models.PatchedLabelRequest: + """Helper factory function for PatchedLabelRequest with type="skeleton".""" + return models.PatchedLabelRequest(name=name, id=id, type="skeleton", sublabels=sublabels) + + +# pylint: disable-next=redefined-builtin +def keypoint_spec(name: str, id: int, **kwargs) -> models.SublabelRequest: + """Helper factory function for SublabelRequest.""" + return models.SublabelRequest(name=name, id=id, **kwargs) + + +# annotation factories + + +def shape(label_id: int, **kwargs) -> models.LabeledShapeRequest: + """Helper factory function for LabeledShapeRequest with frame=0.""" + return models.LabeledShapeRequest(label_id=label_id, frame=0, **kwargs) + + +def rectangle(label_id: int, points: Sequence[float], **kwargs) -> models.LabeledShapeRequest: + """Helper factory function for LabeledShapeRequest with frame=0 and type="rectangle".""" + return shape(label_id, type="rectangle", points=points, **kwargs) + + +def skeleton( + label_id: int, elements: Sequence[models.SubLabeledShapeRequest], **kwargs +) -> models.LabeledShapeRequest: + """Helper factory function for LabeledShapeRequest with frame=0 and type="skeleton".""" + return shape(label_id, type="skeleton", elements=elements, **kwargs) + + +def keypoint(label_id: int, points: Sequence[float], **kwargs) -> models.SubLabeledShapeRequest: + """Helper factory function for SubLabeledShapeRequest with frame=0 and type="points".""" + return models.SubLabeledShapeRequest( + label_id=label_id, frame=0, type="points", points=points, **kwargs + ) diff --git 
a/cvat-sdk/cvat_sdk/datasets/common.py b/cvat-sdk/cvat_sdk/datasets/common.py index 2b8269dbd56..c621a2d2ed3 100644 --- a/cvat-sdk/cvat_sdk/datasets/common.py +++ b/cvat-sdk/cvat_sdk/datasets/common.py @@ -50,6 +50,9 @@ class Sample: frame_index: int """Index of the corresponding frame in its task.""" + frame_name: str + """File name of the frame in its task.""" + annotations: FrameAnnotations """Annotations belonging to the frame.""" diff --git a/cvat-sdk/cvat_sdk/datasets/task_dataset.py b/cvat-sdk/cvat_sdk/datasets/task_dataset.py index 58607045793..111528d4371 100644 --- a/cvat-sdk/cvat_sdk/datasets/task_dataset.py +++ b/cvat-sdk/cvat_sdk/datasets/task_dataset.py @@ -126,7 +126,12 @@ def ensure_chunk(chunk_index): # TODO: tracks? self._samples = [ - Sample(frame_index=k, annotations=v, media=self._TaskMediaElement(self, k)) + Sample( + frame_index=k, + frame_name=data_meta.frames[k].name, + annotations=v, + media=self._TaskMediaElement(self, k), + ) for k, v in self._frame_annotations.items() ] diff --git a/cvat-sdk/gen/templates/openapi-generator/setup.mustache b/cvat-sdk/gen/templates/openapi-generator/setup.mustache index eb89f5d2055..fc6f34144da 100644 --- a/cvat-sdk/gen/templates/openapi-generator/setup.mustache +++ b/cvat-sdk/gen/templates/openapi-generator/setup.mustache @@ -78,6 +78,7 @@ setup( install_requires=BASE_REQUIREMENTS, extras_require={ "pytorch": ['torch', 'torchvision'], + "ultralytics": ["ultralytics"], }, package_dir={"": "."}, packages=find_packages(include=["cvat_sdk*"]), diff --git a/tests/python/cli/example_function.py b/tests/python/cli/example_function.py new file mode 100644 index 00000000000..4b1b4185782 --- /dev/null +++ b/tests/python/cli/example_function.py @@ -0,0 +1,23 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from typing import List + +import cvat_sdk.auto_annotation as cvataa +import cvat_sdk.models as models +import PIL.Image + +spec = cvataa.DetectionFunctionSpec( + labels=[ + 
cvataa.label_spec("car", 0), + ], +) + + +def detect( + context: cvataa.DetectionFunctionContext, image: PIL.Image.Image +) -> List[models.LabeledShapeRequest]: + return [ + cvataa.rectangle(0, [1, 2, 3, 4]), + ] diff --git a/tests/python/cli/test_cli.py b/tests/python/cli/test_cli.py index 6dbcbb5241f..fbb6f73fe5f 100644 --- a/tests/python/cli/test_cli.py +++ b/tests/python/cli/test_cli.py @@ -302,3 +302,29 @@ def test_can_control_organization_context(self): all_task_ids = list(map(int, self.run_cli("ls").split())) assert personal_task_id in all_task_ids assert org_task_id in all_task_ids + + def test_auto_annotate_with_module(self, fxt_new_task: Task): + annotations = fxt_new_task.get_annotations() + assert not annotations.shapes + + self.run_cli( + "auto-annotate", + str(fxt_new_task.id), + f"--function-module={__package__}.example_function", + ) + + annotations = fxt_new_task.get_annotations() + assert annotations.shapes + + def test_auto_annotate_with_file(self, fxt_new_task: Task): + annotations = fxt_new_task.get_annotations() + assert not annotations.shapes + + self.run_cli( + "auto-annotate", + str(fxt_new_task.id), + f"--function-file={Path(__file__).with_name('example_function.py')}", + ) + + annotations = fxt_new_task.get_annotations() + assert annotations.shapes diff --git a/tests/python/sdk/test_auto_annotation.py b/tests/python/sdk/test_auto_annotation.py new file mode 100644 index 00000000000..05814affee7 --- /dev/null +++ b/tests/python/sdk/test_auto_annotation.py @@ -0,0 +1,629 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +import io +import sys +from logging import Logger +from pathlib import Path +from types import SimpleNamespace as namespace +from typing import Any, List, Tuple + +import cvat_sdk.auto_annotation as cvataa +import PIL.Image +import pytest +from cvat_sdk import Client, models +from cvat_sdk.core.proxies.tasks import ResourceType + +from shared.utils.helpers import generate_image_file + +from 
.util import make_pbar + +try: + import numpy as np + from ultralytics.engine.results import Results as UResults +except ModuleNotFoundError: + np = None + UResults = None + + +@pytest.fixture(autouse=True) +def _common_setup( + tmp_path: Path, + fxt_login: Tuple[Client, str], + fxt_logger: Tuple[Logger, io.StringIO], +): + logger = fxt_logger[0] + client = fxt_login[0] + client.logger = logger + client.config.cache_dir = tmp_path / "cache" + + api_client = client.api_client + for k in api_client.configuration.logger: + api_client.configuration.logger[k] = logger + + +class TestTaskAutoAnnotation: + @pytest.fixture(autouse=True) + def setup( + self, + tmp_path: Path, + fxt_login: Tuple[Client, str], + ): + self.client = fxt_login[0] + self.images = [ + generate_image_file("1.png", size=(333, 333), color=(0, 0, 0)), + generate_image_file("2.png", size=(333, 333), color=(100, 100, 100)), + ] + + image_dir = tmp_path / "images" + image_dir.mkdir() + + image_paths = [] + for image in self.images: + image_path = image_dir / image.name + image_path.write_bytes(image.getbuffer()) + image_paths.append(image_path) + + self.task = self.client.tasks.create_from_data( + models.TaskWriteRequest( + "Auto-annotation test task", + labels=[ + models.PatchedLabelRequest(name="person"), + models.PatchedLabelRequest(name="car"), + models.PatchedLabelRequest( + name="cat", + type="skeleton", + sublabels=[ + models.SublabelRequest(name="head"), + models.SublabelRequest(name="tail"), + ], + ), + ], + ), + resource_type=ResourceType.LOCAL, + resources=image_paths, + ) + + task_labels = self.task.get_labels() + self.task_labels_by_id = {label.id: label for label in task_labels} + self.cat_sublabels_by_id = { + sl.id: sl + for sl in next(label for label in task_labels if label.name == "cat").sublabels + } + + # The initial annotation is just to check that it gets erased after auto-annotation + self.task.update_annotations( + models.PatchedLabeledDataRequest( + shapes=[ + 
models.LabeledShapeRequest( + frame=0, + label_id=next(iter(self.task_labels_by_id)), + type="rectangle", + points=[1.0, 2.0, 3.0, 4.0], + ), + ], + ) + ) + + def test_detection_rectangle(self): + spec = cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec("car", 123), + cvataa.label_spec("bicycle (should be ignored)", 456), + ], + ) + + def detect( + context: cvataa.DetectionFunctionContext, image: PIL.Image.Image + ) -> List[models.LabeledShapeRequest]: + assert context.frame_name in {"1.png", "2.png"} + assert image.width == image.height == 333 + return [ + cvataa.rectangle( + 123, # car + # produce different coordinates for different images + [*image.getpixel((0, 0)), 300 + int(context.frame_name[0])], + ), + cvataa.shape( + 456, # ignored + type="points", + points=[1, 1], + ), + ] + + cvataa.annotate_task( + self.client, + self.task.id, + namespace(spec=spec, detect=detect), + clear_existing=True, + allow_unmatched_labels=True, + ) + + annotations = self.task.get_annotations() + + shapes = sorted(annotations.shapes, key=lambda shape: shape.frame) + + assert len(shapes) == 2 + + for i, shape in enumerate(shapes): + assert shape.frame == i + assert shape.type.value == "rectangle" + assert self.task_labels_by_id[shape.label_id].name == "car" + assert shape.points[3] in {301, 302} + + assert shapes[0].points[0] != shapes[1].points[0] + assert shapes[0].points[3] != shapes[1].points[3] + + def test_detection_skeleton(self): + spec = cvataa.DetectionFunctionSpec( + labels=[ + cvataa.skeleton_label_spec( + "cat", + 123, + [ + cvataa.keypoint_spec("head", 10), + cvataa.keypoint_spec("torso (should be ignored)", 20), + cvataa.keypoint_spec("tail", 30), + ], + ), + ], + ) + + def detect(context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + assert image.width == image.height == 333 + return [ + cvataa.skeleton( + 123, # cat + [ + # ignored + cvataa.keypoint(20, [20, 20]), + # tail + cvataa.keypoint(30, [30, 30]), + # head + cvataa.keypoint(10, 
[10, 10]), + ], + ), + ] + + cvataa.annotate_task( + self.client, + self.task.id, + namespace(spec=spec, detect=detect), + clear_existing=True, + allow_unmatched_labels=True, + ) + + annotations = self.task.get_annotations() + + shapes = sorted(annotations.shapes, key=lambda shape: shape.frame) + + assert len(shapes) == 2 + + for i, shape in enumerate(shapes): + assert shape.frame == i + assert shape.type.value == "skeleton" + assert self.task_labels_by_id[shape.label_id].name == "cat" + assert len(shape.elements) == 2 + + elements = sorted( + shape.elements, key=lambda s: self.cat_sublabels_by_id[s.label_id].name + ) + + for element in elements: + assert element.frame == i + assert element.type.value == "points" + + assert self.cat_sublabels_by_id[elements[0].label_id].name == "head" + assert elements[0].points == [10, 10] + assert self.cat_sublabels_by_id[elements[1].label_id].name == "tail" + assert elements[1].points == [30, 30] + + def test_progress_reporting(self): + spec = cvataa.DetectionFunctionSpec(labels=[]) + + def detect(context, image): + return [] + + file = io.StringIO() + + cvataa.annotate_task( + self.client, + self.task.id, + namespace(spec=spec, detect=detect), + pbar=make_pbar(file), + ) + + assert "100%" in file.getvalue() + + def test_detection_without_clearing(self): + spec = cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec("car", 123), + ], + ) + + def detect(context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + return [ + cvataa.rectangle( + 123, # car + [5, 6, 7, 8], + rotation=10, + ), + ] + + cvataa.annotate_task( + self.client, + self.task.id, + namespace(spec=spec, detect=detect), + clear_existing=False, + ) + + annotations = self.task.get_annotations() + + shapes = sorted(annotations.shapes, key=lambda shape: (shape.frame, shape.rotation)) + + # original annotation + assert shapes[0].points == [1, 2, 3, 4] + assert shapes[0].rotation == 0 + + # new annotations + for i in (1, 2): + assert shapes[i].points 
== [5, 6, 7, 8] + assert shapes[i].rotation == 10 + + def _test_bad_function_spec(self, spec: cvataa.DetectionFunctionSpec, exc_match: str) -> None: + def detect(context, image): + assert False + + with pytest.raises(cvataa.BadFunctionError, match=exc_match): + cvataa.annotate_task(self.client, self.task.id, namespace(spec=spec, detect=detect)) + + def test_attributes(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec( + "car", + 123, + attributes=[ + models.AttributeRequest( + "age", + mutable=False, + input_type="number", + values=["0", "100", "1"], + default_value="0", + ) + ], + ), + ], + ), + "currently not supported", + ) + + def test_label_not_in_dataset(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[cvataa.label_spec("dog", 123)], + ), + "not in dataset", + ) + + def test_label_without_id(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + models.PatchedLabelRequest( + name="car", + ), + ], + ), + "label .+ has no ID", + ) + + def test_duplicate_label_id(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec("car", 123), + cvataa.label_spec("bicycle", 123), + ], + ), + "same ID as another label", + ) + + def test_non_skeleton_sublabels(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec( + "car", + 123, + sublabels=[models.SublabelRequest("wheel", id=1)], + ), + ], + ), + "should be 'skeleton'", + ) + + def test_sublabel_without_id(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.skeleton_label_spec( + "car", + 123, + [models.SublabelRequest("wheel")], + ), + ], + ), + "sublabel .+ of label .+ has no ID", + ) + + def test_duplicate_sublabel_id(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.skeleton_label_spec( + "cat", + 123, + [ + cvataa.keypoint_spec("head", 
1), + cvataa.keypoint_spec("tail", 1), + ], + ), + ], + ), + "same ID as another sublabel", + ) + + def test_sublabel_not_in_dataset(self): + self._test_bad_function_spec( + cvataa.DetectionFunctionSpec( + labels=[ + cvataa.skeleton_label_spec("cat", 123, [cvataa.keypoint_spec("nose", 1)]), + ], + ), + "not in dataset", + ) + + def _test_bad_function_detect(self, detect, exc_match: str) -> None: + spec = cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec("car", 123), + cvataa.skeleton_label_spec( + "cat", + 456, + [ + cvataa.keypoint_spec("head", 12), + cvataa.keypoint_spec("tail", 34), + ], + ), + ], + ) + + with pytest.raises(cvataa.BadFunctionError, match=exc_match): + cvataa.annotate_task(self.client, self.task.id, namespace(spec=spec, detect=detect)) + + def test_preset_shape_id(self): + self._test_bad_function_detect( + lambda context, image: [ + models.LabeledShapeRequest( + type="rectangle", frame=0, label_id=123, id=1111, points=[1, 2, 3, 4] + ), + ], + "shape with preset id", + ) + + def test_preset_shape_source(self): + self._test_bad_function_detect( + lambda context, image: [ + models.LabeledShapeRequest( + type="rectangle", frame=0, label_id=123, source="manual", points=[1, 2, 3, 4] + ), + ], + "shape with preset source", + ) + + def test_bad_shape_frame_number(self): + self._test_bad_function_detect( + lambda context, image: [ + models.LabeledShapeRequest( + type="rectangle", + frame=1, + label_id=123, + points=[1, 2, 3, 4], + ), + ], + "unexpected frame number", + ) + + def test_unknown_label_id(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.rectangle(111, [1, 2, 3, 4]), + ], + "unknown label ID", + ) + + def test_shape_with_attributes(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.rectangle( + 123, + [1, 2, 3, 4], + attributes=[ + models.AttributeValRequest(spec_id=1, value="asdf"), + ], + ), + ], + "shape with attributes", + ) + + def test_preset_element_id(self): + 
self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton( + 456, + [ + models.SubLabeledShapeRequest( + type="points", frame=0, label_id=12, id=1111, points=[1, 2] + ), + ], + ), + ], + "element with preset id", + ) + + def test_preset_element_source(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton( + 456, + [ + models.SubLabeledShapeRequest( + type="points", frame=0, label_id=12, source="manual", points=[1, 2] + ), + ], + ), + ], + "element with preset source", + ) + + def test_bad_element_frame_number(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton( + 456, + [ + models.SubLabeledShapeRequest( + type="points", frame=1, label_id=12, points=[1, 2] + ), + ], + ), + ], + "element with unexpected frame number", + ) + + def test_non_points_element(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton( + 456, + [ + models.SubLabeledShapeRequest( + type="rectangle", frame=0, label_id=12, points=[1, 2, 3, 4] + ), + ], + ), + ], + "element type other than 'points'", + ) + + def test_unknown_sublabel_id(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton(456, [cvataa.keypoint(56, [1, 2])]), + ], + "unknown sublabel ID", + ) + + def test_multiple_elements_with_same_sublabel(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton( + 456, + [ + cvataa.keypoint(12, [1, 2]), + cvataa.keypoint(12, [3, 4]), + ], + ), + ], + "multiple elements with same sublabel", + ) + + def test_not_enough_elements(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.skeleton(456, [cvataa.keypoint(12, [1, 2])]), + ], + "with fewer elements than expected", + ) + + def test_non_skeleton_with_elements(self): + self._test_bad_function_detect( + lambda context, image: [ + cvataa.shape( + 456, + type="rectangle", + elements=[cvataa.keypoint(12, [1, 2])], + ), + ], + "non-skeleton shape with 
elements", + ) + + +class FakeYolo: + def __init__(self, *args, **kwargs) -> None: + pass + + names = {42: "person"} + + def predict(self, source: Any, **kwargs) -> "List[UResults]": + return [ + UResults( + orig_img=np.zeros([100, 100, 3]), + path=None, + names=self.names, + boxes=np.array([[1, 2, 3, 4, 0.9, 42]]), + ) + ] + + +@pytest.mark.skipif(UResults is None, reason="Ultralytics is not installed") +class TestAutoAnnotationFunctions: + @pytest.fixture(autouse=True) + def setup( + self, + tmp_path: Path, + fxt_login: Tuple[Client, str], + ): + self.client = fxt_login[0] + self.image = generate_image_file("1.png", size=(100, 100)) + + image_dir = tmp_path / "images" + image_dir.mkdir() + + image_path = image_dir / self.image.name + image_path.write_bytes(self.image.getbuffer()) + + self.task = self.client.tasks.create_from_data( + models.TaskWriteRequest( + "Auto-annotation test task", + labels=[ + models.PatchedLabelRequest(name="person"), + ], + ), + resources=[image_path], + ) + + task_labels = self.task.get_labels() + self.task_labels_by_id = {label.id: label for label in task_labels} + + def test_yolov8n(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr("ultralytics.YOLO", FakeYolo) + + import cvat_sdk.auto_annotation.functions.yolov8n as yolov8n + + try: + cvataa.annotate_task(self.client, self.task.id, yolov8n) + + annotations = self.task.get_annotations() + + assert len(annotations.shapes) == 1 + assert self.task_labels_by_id[annotations.shapes[0].label_id].name == "person" + assert annotations.shapes[0].type.value == "rectangle" + assert annotations.shapes[0].points == [1, 2, 3, 4] + + finally: + del sys.modules[yolov8n.__name__] diff --git a/tests/python/sdk/test_datasets.py b/tests/python/sdk/test_datasets.py index 67204e4c26c..35b2339ec67 100644 --- a/tests/python/sdk/test_datasets.py +++ b/tests/python/sdk/test_datasets.py @@ -101,6 +101,7 @@ def test_basic(self): for index, sample in enumerate(dataset.samples): assert 
sample.frame_index == index + assert sample.frame_name == self.images[index].name actual_image = sample.media.load_image() expected_image = PIL.Image.open(self.images[index]) From a43477af8eea946cf6498a6b1d12888840925bcc Mon Sep 17 00:00:00 2001 From: Mariia Acoca <39969264+mdacoca@users.noreply.github.com> Date: Mon, 7 Aug 2023 11:09:17 +0200 Subject: [PATCH 10/32] subscription management update (#6307) --- .../docs/enterprise/subscription-managment.md | 47 +++++++++++++----- .../en/images/bank_transfer_payment.jpg | Bin 0 -> 34980 bytes 2 files changed, 35 insertions(+), 12 deletions(-) create mode 100644 site/content/en/images/bank_transfer_payment.jpg diff --git a/site/content/en/docs/enterprise/subscription-managment.md b/site/content/en/docs/enterprise/subscription-managment.md index 2dcfca9130b..4d14acfd6ac 100644 --- a/site/content/en/docs/enterprise/subscription-managment.md +++ b/site/content/en/docs/enterprise/subscription-managment.md @@ -17,21 +17,22 @@ See: - [Billing](#billing) - [Pro plan](#pro-plan) - [Team plan](#team-plan) -- [Change payment method](#change-payment-method) +- [Payment methods](#payment-methods) + - [Paying with bank transfer](#paying-with-bank-transfer) + - [Change payment method on Pro plan](#change-payment-method-on-pro-plan) + - [Change payment method on Team plan](#change-payment-method-on-team-plan) +- [Adding and removing team members](#adding-and-removing-team-members) - [Pro plan](#pro-plan-1) - [Team plan](#team-plan-1) -- [Adding and removing team members](#adding-and-removing-team-members) - - [Pro plan](#pro-plan-2) - - [Team plan](#team-plan-2) - [Change plan](#change-plan) - [Can I subscribe to several plans?](#can-i-subscribe-to-several-plans) - [Cancel plan](#cancel-plan) - [What will happen to my data?](#what-will-happen-to-my-data) + - [Pro plan](#pro-plan-2) + - [Team plan](#team-plan-2) +- [Plan renewal](#plan-renewal) - [Pro plan](#pro-plan-3) - [Team plan](#team-plan-3) -- [Plan renewal](#plan-renewal) - - 
[Pro plan](#pro-plan-4) - - [Team plan](#team-plan-4) ## Billing @@ -44,28 +45,50 @@ see: [Pricing Plans](https://www.cvat.ai/post/new-pricing-plans) ### Pro plan **Account/Month**: The **Pro** plan has a fixed price and is -designed for personal use only. It doesn't allow collaboration with team members, +designed **for personal use only**. It doesn't allow collaboration with team members, but removes all the other limits of the **Free** plan. +> **Note**: Although it allows the creation of an organization and +> access for up to 3 members -- it is _for trial purposes_ only, +> organization and members _will have all the limitations of the **Free** plan_. ### Team plan -**Member/month**: The **Team** plan allows you to create +**Member/ month**: The **Team** plan allows you to create an organization and add team members who can collaborate on projects. The **monthly payment for the plan depends on the number of team members you've added**. All limits of the **Free** plan will be removed. -## Change payment method +> **Note**: The organization owner is also part of the team. +> So, if you have three annotators working, you'll need to pay +> for 4 seats (3 annotators + 1 organization owner). + +## Payment methods This section describes how to change or add payment methods. -### Pro plan +### Paying with bank transfer + +> **Note** at the moment this method of payment +> work only with US banks. + +To pay with bank transfer: + +1. Go to the **Upgrade to Pro**/**Team plan**> **Get started**. +2. Click **US Bank Transfer**. +3. Upon successful completion of the payment, the you will receive a receipt via email. + +> **Note** that the completion of the payment process may take up to three banking days. 
+ +![Bank Transfer Payment](/images/bank_transfer_payment.jpg) + +### Change payment method on Pro plan Access Manage **Pro Plan** > **Manage** and click **+Add Payment Method** ![Payment pro](/images/update_payment_pro.png) -### Team plan +### Change payment method on Team plan Access **Manage Team Plan** > **Manage** and click **+Add Payment Method**. diff --git a/site/content/en/images/bank_transfer_payment.jpg b/site/content/en/images/bank_transfer_payment.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cb4b730d53dce1d61325ff86740760d9227ba2b1 GIT binary patch literal 34980 zcmeEv2UrwY)^>G(CTD~uD=1mX0umaKEYRePBqb;rNs2TeC`isALL(pb_;kkEo!xK${nmM^rnz zxPd1;RsjGu_v8PaE$kfKPdi?6XAwTlcUlBE90ue7CaPm}$0(WTXz7kKGqLih@bPeR z@)*mU7g4!l=Hg^$W_#HqC_l>MW(MZ6ZCrao#-sB3hI+S%*Av~put-2FJlg z;NcSxf;W_t0Z<4G28F|LaNuz8ZeQ>^04K+xIL#%PS}Gf|ALiudR$ftARsHn&iI&7_P-k#9D4s@a%y^Jc5Z&*^Xl6A#^%*lan)SDq{ii?TnZUYgoO*`2?2jRD%t3a(nfq%L9I{JmaeY}j&cXp1R>hMIBROy!cIGH7;oE!rbq^1L?v?}5@%Jb?-T(glZ*TmY4+i4$ zzOyddQA&(%VG(^0VJbiqh4Z%^o)R4DEy?xNJMD_i<%7X)4SV2;n%dsV@KPvnjok;h z`Z$Y=D7=g=bjiw!Hje06vvRX27RR_1iJ@@y4M%-8;=mCp#(B^$OR%C?`HSY*=D+)?>Mm;R`r&bf$0JcuQAw_#7qlniDFdBxySJV)&)Gs( z?lDnq-whVkPODexu7X#Ga+tp4pjMfLJl&FdjyYZT_9?%9+DVH(8X&aTkUgObl{woc zaUG4*BLf6R`H<@_pQ6*-^=v!JKfVc4JqJtD?ma zhW9PrY>Ix6#0b>nm@YFPBOaD;BXx@sr^-bXmy?eYmX=Bfyw0cS=3E<5VO`b0_wPo| zE-s;H7E;uX$9TlZMmIx-%Uqh(0F%Pz6n(=<0jY6zFCH4{9H%0{&XK-i%BGkrK_7U7|Q(vbp%v+T?35pV`MKAUc{|C7+c+Zsrbmh%8#M-Fw-pOpj`8_QDeO zp&{}B)t|nDYN8<69_Z_z4V9k;4GtA#)_Q7ceukb|)42yR`#!xF@0(^3#bDQZT<0deM0>LduGze7yV)t>OGVSJ{ssCdh7dgTCe z2s}X}P%?aYgrdq(@o)*P9vEAwSR8^LBetj-3WQvp(1R7T zP#{Ck26NI1z6^VjaS0w8gQ3$j$h!nDVTta&kHX6@xSx-!u8z=FMG=!@o5>Z%6c8%c z0KONL0Fe?(45`_;k<$ay6Vf7h5P*w&bV)?1S9kaXcccQ6Y5@pqq*P$#KQ8lHC5-fW z78dswN+;-IqJIl`pA2I@cg-Vx%V~e^_P%wXIrOaa5}hDPm<|Q!Ixu=S;hbXUyNI#{ zz*3IFxAnDG3ju;^2e6c3W!Ivq(}8;GVa&1Xx@@b0jPz;%*_-pt?Vc7V&k?QN^4!Co#*{pgz@+p*7D`qWQ#wxDS zhbJkBcwUaLjc&?_;dl1gRgjYP|!1UakD8@LTv+eku 
z?kQVXNOAWrq1qvk?zHDy-;5D$|9A*IDjVP4JOo;X_iSNd|Goa-{`voSe4KmE@?mas zTe`?`*jVlgb0XY=rWglM{o2y0q@)>xVtR-Q0Nk1ZGfFHDO6wP45p^yoGNC5R8!Qp>=JX7qNiuZJ zXqKT-#Uemaurg^rJ+YmgYNIm5Eoea`HntaYJUlC$#bKhr!eesip{iGOAeVUrLXrb| zoagTGHDyMR;@1IEc5}9X@C|S9M=aMZ>R88CY6w zpYBM*xnL!mU-jNu5iH{I?NL`PwR;yO1qXDN90(~?tV`Rb)=*@~>F9Vg-!_a9>$&sx zcCZ*8Pi@3420t#qprcIAYiP=CNgdz-+^HZc$70UixXo4;q%grJwy1ZD9C%^=VuLXk za;KN=!&%>n;t7`kE~WQMt7!AIsi__~4Lc1vMbH-sNXjEk)!VEyX2BRPe>NqN57Gh` zYu*8GAIjfwL^ihW^uWcUr)~SSY5=hYwwbIL+9|P1Q=SMaAYm1Gf@suLrsalCXoe0K z2AIPEc>l2Y`{N?(ca#x`a(c-$itwMYmj|fHnXJR`&1oL7fnOjYgQP8fx5WcjqZ~v| zJvow_H5gzhlH9#!`alji(f+cuI1uw7!r{xrwPxMN98e>Bk@uXhQS>P&0s&YNd}a~9Y(N!_h>F{mIq4QY0#=unaRme2?@{W0 zHGsv>7mlj|^6^+0W-x3ICGNS3O!-H58v(wGGtNGReVO#xwmF3XHqCM#;u>gFoPe*Y}Y-J-?f{BaZOk30e4zydwC5Ziwg)3iFJE+>$# z8Q~}H3-Br6ihpuT*Ve$|s$|E)`i{;iPFpk7Q25;q>x+XBD#%jPCzVczFkJ8J1290% zbt|ee2-z(woPp1ksm@$RfL*x3+I;9WfZwWR?6M<&x4kcTNEjI7%-<=6VO@QM+kc5Q zEKDa3=&5oTWSay{p(`wx^(6J~qsX`A?11Rl)E%9pdOVKmX|q&h+7Jd z6iVND2w30i*xe=JvIk}zh;W~?)ZAE+^xWC_P`EJUuKL(kkRhDKj1%FRfhE&f_p|C#g7O0=z5Vmb~OMVtI` zi9|2)b15pOw17m}ZKuVk!t)P7qN~dPWt#>RR)nz<1z91-B=ETKZb={G&qoe<%pQ18@Ajddfe=o@%Sy)|mF)W2`sw1uV?W#sm~%2jfr39}jj` zlH|7hfK#Qb&xT|Eg6RQuQJ$Y%6%uqQ8Onhc}XY74LnV@`nT*<}WGFuph zK}oqGnHuTfgmx-Zc_oUv_|>Q3$j2-mT2k60x+y@buN*pBMbUh2HoP-cuMW`cEi~#n zC6L;2yu+&83`?{t=9IR1sVed+>p6A)^~Mz=DoFG-2REA=PP~+_^mpgD%`p6F+;w`k zdtZpij<*r_^>+f=eT6~OB?(nTwb>?EBI}yI7;>jg5dlb<^6;Lt(IA$<$Ta#1=C)XL zm%-a1AYy_Nj)5v`H{@|ZbNz2L5HLQJ^2I*{=*m%4@g#58COzo^Gr_=pt|dUrz%Xz- zUwO4zs`jAV1H(~k@`N_z^4-A7Ix{T6=a%WNY#ogDj8x88;ZJ((pMLavLrL4y2MTJ^ zqhrk&Mzzt8|0fSI-;~_{Htsa|0}M>&Tk@8?N_#!y+58R~{Mb0cqpYS~?U{W?mJoG( zpTHw?A2PQe)~c{Z1%nAR^o{P@5Mb7>5sUc7dLT?I9;_XuAoJ4=tc;oL51)Lpw=C1 zxO91h?!+j6AnI5ktS7M9K2xccHV?QH7Z?gu>l!{JJrUw_YQiJ39m6wagPiTnuMrFp zYqmhJ5cS|Z&6!ckU{$!hEf`(NzJ?`?*MAk=M)W~4mo|dL7TS6o!zF{rpoa74Yv?hM zGbv7^j*07PvQEoHiMq3exqra$1mMBRWR|&dQuIclg181R%pp57nA5u2Y{e$GhmP^R z0Ip#Pbt!9n@A-ThAVyKVy(l>t(NF6t;Xq!kWcd 
zupeyTiIo$IPZhLI4ySQICs<-2^Zf-#q!|hi>0I+j^aJ>`V#0YtDya|I&I63QXq>0P zr<}x<1}n{>B$BpJ({|d^@2l34Gx^oFGij)oQ3yB@cu#(>_%Y2>hrnn;7XlZ^m>%?ru4m2;G!pbfKutz!wj!H#QP;L1rlKTC|+ zb^S8m1eoOSj+j^^1+f!i_`Fueg&>yS^s#>Z!f!dWA@s}X*~D8Ky%^AcTjJ%BW#d}R z;5v$9fF%g3eHVuBnoWO75aXl!6v(E3eCPiCVR!3C-6lQ+H7MicQeU9ITITyxAd0`Krs9Y+a}fa-TB#s%Nz#IWwnt3$TG zUHjCo&%U_s4)EX-le>J@NR$2)1vTrq(`V981$~e-ws8-r)Y)TtOfGEBez&pnpukkl ziq43x|@)$nv?* z4}X(76GgQpMBZ|0*0&?DI%FzB`r@pf8Q*fD;iKB6rF0{p;FKj7ZK)*&7sQNP5% zSQUa52RtW&Sst~v2zy{SpCgTM1`AuU6F~+QNv;o(?f+FCdSmD+_ zbVh(}IJL?xbWSb|v_ga)q~(*+&JW#35p&$mFS+2pq>hCTRkOp`%s;E5@Te8#8TsFo z^8$QLNc2OYhB=Wi?Kwy`S(Hj%%Y}%z1;9#yNJ02{03V+?cs_)J<-NXWqv#@N9nnsT zwsST#OlC`A=u5q~6?!e@b`nSFp$J* zKd~`cB#t4EH}rTMAc>ca#r1Ys;x2Xe3A+Zmi*A$+>03^0&@O@x{!qj4n~1m_&|!s( zH;Fh-<%=VQp|t$G8X@k|n}DGsSK!N>oYU@4#nstMVpsqXW+KOIHP zpJAF!gGjYJs=JEScD97kt`GDF;S7<>?pT4{a|#p#Z3}j`5RtH0|JZk9V46cN9ITTff)qln*D2egB zI_yUr_fOYw|6yzE#{y1L0d&Ow68!FuBYwYrLPVM3U_wP|Z(+!o>l zhVe(G>z~`r)byQHj=8H`Pu$@fSbWX^gIB@)CukUT_`0W2qz`#YYp9jUlYKA^(cmwM z(hSI`P9;iR5d0xjT5!t--T7L1ss}drm8I~v1nUtz(iwxI^VpRu%pW*E{_Yc&n5#(m z4JzY44J~}SK^g~Bqwgvs59LV@Yy5c;U{Yk)S-&$zK&9@(TBQMwA!XBX!5~ogdn@P0+6D=M zC_^MFq_Y9ze9m*S>`pw73vZQn@o_*mP;W|IqZMm&;nRRx)+FsB zh}`L3Z4)$!sZswN{e=K1{}+5z95p$Q){0!KnPP}2ZjnHNl`{gt%9?>C;CX@yYE&At zOFZrkxEH+29~gl&wzjUIsIp>A*Uk({)Sr|=!e%D(Hg|L@L&azXo5S=oxJWpFpjT0u zj~Xw*BUOC=2?UTI=j>mh6I|ae-sRduAVJ=#HYa!f%^uhLfKzcQR$7ypRw+Qy**)Ke zm(3mZ(*%4C6B`@*T)i(Ss;`B^(h8}pdvLU5$IrjA;*xV1Y<|76sHaC8Rca8e>idxc zi6zjc9V{V?7YlRPv8tTZ595!?YQAkVx~&L`=uXjH$?oKX8rMllH~-K#{29*ER8?~? 
zT{$2xvcDn@ep3=qq2Zdcte%ee9;0uvRZ-NJ$^?)ur(Sc+PPZ}myv+x4xOghb6|!Oi zx02RBN{d&c$i1cOz0dki5r(1;LZ|iw-UF!yLxLQn{%)P zOAb$wnd1j#v8nEz23R~J%2~G1FGDp-&8{F)7-3M`5?*O#gQJu0j8WKiIAO~ zt>|9;vxflXnH`Wg(2~2ECbOZj!kSg=ba3k8`qm*3sktN(xR5@ksQlBDnM2-)x6-u{ zG1c{7lKtL{A4n>et&-;Zc{84L^ZP7t2;|i7`9#`HG3y+ximS{V0v|3N0vdi_HkjVD zO6BcOxyOTO>&uRVv_s(Yf@DsVul8>1JomjrAS7{L@(`HpyRO0V-#7cNKC7sz-nCX* zQ$2Wut7o;?)01yHaPpSR-T1DU(4NU(v4!bY9+AeCvI7KH+YL@r-$O*@;9_WA(VsP+ zWR6$R4E2wE&&re*2jMe!i)f*SmkQbh=u6^?O^$~G_aol2Z6b4-1jD>9b&YUS2uOFA z0|pP3Y68NnZBhVV)N$@`EUdM=(jG`F*tQNhCYemj${PCx4WIIVH;=AmC4!q5%Mi9q zq9}1kQ$0j;Z~2KV)r)+b1tozV5jJ82PgW%F;CS#J2h`_Z&v5=tqA(|Z!zs+MSlg90 zEKb&tB;v?3Sms08espe~IUe9W^?FVk^oFTr_T1C~%I+y*_i`b2B9SuNXlSnkQ{d`9 z?=cAV?4Eb`Az(7Rt!2is%dkhf;jovddf86(OZ0|`8Y+fA# z{047E*7@sSm_6LkjSzln6Jt>aqt=ncCBbjGDO1$V^CWr^y&v)Jz&GO%sJFzNYTd}a z*>dF&km@-E#!FF@);@jGR^=f3#@25I^ddf%v}Z0I0x8pF(pw+V@Ij_>(t#zv*&*iL zO%CYQNTb^yJ?ht%NcVTCAR>xIa+~KaZg1W`*bfEnvG!-|GM4P^${hkXn*g={sA@lf z!?=iy&t#h=mL6mT&N||??znQmC=aZpH$Q{aLrw5Ir0;qi0)qwVOKIJEFZSx?vK~Ao zrTs3z6yDG#ijF(c{kMzT6BXUBT(t(iTHB$ zhhPn|SGQvDyAY0Lkh7b&_r*=YAyslvXVr1tlOagzZ9VhL9F`v3jk=cfnk}uC$nCkR z?%v$FN{a4T@5eO5mB^)jjPQ5$(2tQE!D|N+vxlH0-pd8Gn@F)kVDX+Gm)2m4)&ZNZ zW+WCAc$0^~JHfpsI?}zH()(`Guk?hrB#E3py~``PbdXWMSeU+pe9*Bk=l9M~dR@>s z&5-Re8-FsJd2XF@w!1P_R%l^Pu0@nBjA`~F!Gy5_Qf4|mY@h2+mGq_Xy?ef$+?-M> z>k!|%g^#R`E!qKV1Uc6ABTn;+XuIo7o1bVOFsUxdmSRwCjEcS487XmEf9SJ7YtujP zlkU6v&BQ5#QyrY3@7NFQP_%w)$U~qqaz7<egzuH5^3gymMmkwu_Qa)QV4qKlRXX$^>c zN`6J{{I%iums=pje_MDpy+KXN4eWZ61D)(ALfLytJDJQf;9zS3$DPe~s1Wm)J-?k9 zzobIDWgpt{_a)k9e;ZEni95mIU61|dE>}vFt~Je`f4$Zj94C+VhV(%c$%LqPG&5e(MfYv?!dJO-uc{ZbSf1^Oa?w&2VOyXipVmBEd_n;tU@pL%+XR^9(@=I#x8I94 zz%g&n$&eK9!P${XC5qB;{TAs5Vy5xs;Pyea^qLAA*r9WNi?N4*5SB>#$6>9w*X*|h zPT)M-|W0!*co`)UDQ*3#d+$4eGB8L3p%y@~Q;rPZ+quVhykG`-yT>(xZ%VW%mvwrQ#s@SpvZf0Ro&+ez}qR zulI7HpiTeF2K_ldp)^lJ#m=9<-_k=FS(2?&eJ$XV#A%}2E9d?0=BNPGn@P3f+%6IR#GR+DKpV?yuX%7KC zY0||`z$jsc!+<9;z&E7jU2k3o7jE@vcWPkAwSE_)<9NFi5}+7KKNyC=UyBUs7H3A??DDYf>(sDCx0GoLOhS5Ad>g&Vh_8BaM4KH{F4< 
z^t0H#If$Kx!Gu(97(}K@_xzhLU@gbl!zR|C-PxQt}%rZKSI7_U9 zm4Rp{aHvw^ai`vz;~_A6OL}V^w6cUC0&*0NgLJ2ttI&ezZO}3CoOyT0?@J1hBAR)J zoQ?}NsKCW6vU1=B>SKNypgoA9#Iq!=Rq#Iq>dG!>{IV>L02VxQmhOvjZkpadD=M}5 zR*a^yD0`%OJmQ|`As}%a)FE>~s}76*oCA&nFl92@lm4O(iaRIp(C7U%;10CHG{1V} z0zVn{+L8!W+F zJ6ym0o^*d-dQS~7%pd#t18<#v(TrpEmA`y>LSf@yzA;Wp(Jx=~*PrKd!OwdVti0o= z%DF*3Wlwse?GQ-mZKS+ycgeh$uLXvY!~rb)UfCRsNpIRq&j=m@PRzT)Zu?IL4}m1V z#5wmkeVNIx%dbBQ!~b2SBq0$Dp}!Za^Lv2h-?#Q1|CzQIkGaIvK&F5vIWK(8GUK!% zr-#^e)ts`pHd-x}rR^YhKkEw6n(?Y%@i)wOxY{*uIp1ld$$tn`Ec;c{O!+leNVPeD z=FexpacDYEP4ce2bgR`N;4pFsxSnY}1j4WS#ta|$emDf4;V?X2W*UlI1((2KPwA1n ztJ0Ik%zrCgk*DT%Mdn_x^yX;y)``8yFa0fpy!EqVXLctvJ5Ik}$~^>*NA6d;?YGvm z9tf9isyc0N-97k1<=Rno{l9how{MR|==Y4FuHouL%?O2NGxHXvnWw_#m3l;v&f|%} zSw6%(TydSgdp9uD6yLT;{a0E9M4G?|xUdVlNB?i^zGg@M-eOdk2=8FAV|`A!3RyG1 zDcy_V>7(d^+c?V6cI36Ffb{BM1v>-EC~BoWTd4i5kzCiBi;#+h7N7_ImOKd`D!|v} ztj=c(H7Y*+w&H^r$@EAqiJuWrioO>&L{K22?+L?V@xky52t#1&x425bLiGL#KXaGa zRx>U$`jD2`+nEli4VuIh$crgutqt>%zhJnw{_w-gth>V6OXp2)yw40%FbHN^cdaoI zi~nexpXc0tjn2Vc>sC~Q+Z(FCoHfd|uEOUGW36!*4PwkQtW&&JDvUb)Iux#Nxmabc z+5RG#vJbjMNE*L+Y>1heQ^@tv%=rz2v1a^sWy~(2RoI(p%NhBUz$Lr#5PhPeN(+Wq z)%eI{%D0A2A}?5Z9P0Sv9mn?Vaw|Tvv&DDl8ZRBUwmN=^z&|ufbap60kD>L6?m<6R zWIdXo_#+>Z6+9(nZIEkOWypN_otD1yh2*^eR+~s?EGM8zI ztg?Mt=Y1*(4nv_>w4_dT%`nw7v}>!>7bNgT^QyY?v4t8_E@$0)wm1VMG>eMd%mO@` z{aF_OT+8}5tz$55m+m~ZsdCq<#;nE^NDOkZe`%53MTXsW(G%MMkxBv4igO}vQ3wqc zKdH=BES}l0>!b1TUc~%-gtW@Nbj@uv&b9KF865#K?B~2^#|qsRKwLBQoNwC2&ii-t zbBq#yxW&r4N?kLw_)Xnen4b4hv0$*kInD0eBh>SUYIxa?O2^5X?K{FlQ+Gge*s<87 zN1?qxlxU>x3zEGEmH&etQ6jbK|7!FCUJgd%k=QoXRI${Cs+vdzrSh`w724Gg(xU~% z1i8zkxJBACmecZjWp=d5mqz0TpHF@+&~f9NmUpjo5m05TY0opkX)?%69(pmYU#+J! 
zeWlmRA-$&gLp7uQTZSP6lcysk)VU#($3yUfoE1&d$}MAKtu&PidGQE>OaSfy_=P$1`L3PVmEC~&nr0b%X(L2J#!PuTU|RU({IrI<+ZHK zE>k>EEoWO0nZ+&AeDxxpnyJZ1cG991IZo@;Ui7-_MKnT2URmaUabEo#b^2Zy>rcE7 z%3LcIceH&yQZ@-u1iBy)Sa8BMtWyDHNlXFYbc6MPv(4Ox01VP0l>z6P{~QOzjt?{# z{_}3N{ev`!ANYk}h$GlxAmQWN*P5a@CVPqrM-MP1rsLA0r9WZk<9a27Lsrt z<>P36#Q0F;af4b$GmvB(&;h7~a4om|*DV4eOPb28Ux;bgcqk4NPNPWIJB6s3;B3^`fiP0gp& z@Yz)?zFo9REQ{BG0Emq-qxXwpXX0mww_D`Gc5EJk+R^PLXYXhFNoQvnNd)K9lCk)? zfABMc>ML2UWS%Xg29Yg}p?~C~o%?_A5NP84Dmz5Z3sRoQ!W?x7aK&L7f`Pz3Iv}gC zDKm(TERsWH0@rx0z=_C}LqD9B1e42x#vxw4(C;dgEnsj7)!1^=_liuWA|TEUX@%QR zK@^uXHgf~c3AqZMIgXlOy1%R22$!kEixP3WpDDJ1#p9)Ym{N$VlEI>zt*c9b#~l3UK_8;RG8i^er`ttu&k&| zF)V>cB@IPum~I9CasOfe^;xlk2eb)tEPo-I90);-rp^u*wZ_L;RM8$hD3Y-KG8Q@@ zhD0`E3AkN2m1fptMo+ythG3XnjMhJ|86we0hmdY6vsuFu%};q-kn~2_=&pVeeR0#f z2nLtG3({1Y)nCV=sM)GroYPFgG_wX|2rKv8^!Ab}X;$C3h>e!n2tmy7v%R$_#>FXT z7)TVIx*i5l(x0nw@|1|;2YuT=3!wfAUim-5lD;|XaCjB?P|?=-ToI5t1$o0Xyd2ZV z4=OWQ7|?Kh=QU$og-|g8#m-jIWpSMqA}tR6kfX0AiY2IsyvuUJT5&|-VwAc!YqqhM zWi(X6wp|>G^+!<`vS|`RJE~e`&Q)FBEHE>&P-uF-n#l+v%t-|Ir8pXyoKkTGyFA^D zWI0|2qwa!;$uY6LIhYW&xK|`==UQpyv!%D$(~2|R_h|gKO1o!Kq+dayf72fbdg06y z1g7D4Os=85^JVI9=(~!X>S~a3D(S@tam92zow~{ft0FT=OB>CFzCSL*j-AQPFprK| z9u12-fy5hCw_g_$YdF%ENU-cvy2F0s;D(+tPfYSEnqtm`%*z=MW1vBS> z(#%cI(>GVZ)_LR$vYa0jq`iA%r8d=N@hH8Ia9oChjg;OdBxHgAh`p|v5Z6rjjP{xH zOV1CO)7Cafq-1`awNSzeYlTYm{g>7=m@|Knr}`5>MiirSmwp&IuQPMMf7hNXh6d(2 zi=MTyLh6=PtL82&An{nX@}IgtlsS!J7V+A8ATYyqU)|mKvM$mR(bwljmL)PlO9BD; z(W+<~KR!S87yxi>L z#?GuB{AF8US5!kcgvkLj98p&UQv75q-e0=g%m%5Q)sL#754ej&bHHgH*(b zK49VRgXaDT7VB5WB`6)fgVYhl@oz}#_%kUacv1H|huCN?zD^(^$wjh>Y1{RlmZ7Q9uuLTcw!(f)0Z~fN2E2 zrm6iw-RVci{y*h0mETCS_)}C0s4-f{vRNapQVt_?@8a2bI6F63l!6M;DMfwLb{RyS z+||acM4(t*HacR|7V2T1c`EsI55#6Lu(1=6G~jFP{=lz0rofREKO95o$VEB{x=stE zm-~((#TqHdNGHcIygGQQQOaTiB!Xj@Q(nOkw)aR>CuiuN-ek;$b}9s3EyTv~ zk+KT`OD{ae_I-3Nh}0HlKWT9T-Gx4d1`d8)vK^JhA1C&0p`B-)T6HM9Z>*t6@}~#m zDwJRZE;~Iqp(Rxge!LwVz^uphqo2jAj%x%`3ai~5nwDKHh4DlmT_i!o#uM-fzS2NH9ov;x_&ZnsT!O&5v 
zumA|_h?e7~5Rjjt4~_LK1#`N1UZ(@Do&Ept5a-Ak!c}hjDOmEmNf`eUC)Q7T!0%uWY!~u&2|xmbIR?=xc_IlqkpXLD5y6o%~?$7pYI1}5!?A6p|L;p z`@hN2Z+(8~5$WwHZ1FpzU*67t>8T^_>w%VxGON%mM}F6H_VtlWO`95*?~=*X*>Rp-@PT~rLv z+5o&Faj`^;)n_voVn^_GyOd=Yl*|ShK9uLqme@F2N==KG z4MXIDT3}AQVh7iJPg`C?c#OFOPt1PKyzfDI>uKE0ch?doh!j(;_ETrqio|bpJgUCv z#-E)gc9t@x^O?>@tr4ER3##TTVc5?3+cZu3XEb=qppmC2uN#(KAhI@(UDTS|F3d6t^Y%_vMXSd?oRZcY(KWDCy$-gqZ&=_X16vx`-sTN6|;Ui zTX7M6vDJ~+j5Z^Fuo)Mf%d{<|z*!>+dF)+MG-3X@T6Pp)JMha}#d5?)kvDtetOYl9 z`Yeiea0YV>cW#uxA2v@+M_m??epyZtXjFuLQ|)3C{$Win@=SWtC(((1-bVHd6=A8Y zU5(G~h1=$_pQ@qk%qZB;aH_mHJGg%c@Kzjn70~Nnenni>e9rRmmP+6GYtgVwTU83BifZRR%50E z-1bfcJ)jzjt!s763}0Eg(`YE5PD-qG5BBsFW%TrEnrSNG+`bq!la!CqSLY&fr*E31 z>S!4XWhL8O-d2f>Bf5OAn*Xuq)vR4DipNGBgjTrnvug#NcC5$4M(Pt3@VEpci`4FF z>sMdGv)0OC!N_c6P@Uj915v;+jAfLE5^z9B!3f)1qi&wWgDwSQ64lPX>5)t{F*5aA=!%k}RgY zR_kl-FmJ@9roT7@iu9L-8YLUtxUVihOMCt}QE+78R$h}`M#|k;*O??su|kHW_E!m)s>C9nL^JR62;yRDQ0Jrk_`Di_^ zxD+*lCN4{rugUPq)C(Y%qugV+exoThyu3mjrZO!3}kKE^c%lbnzDZfPaMt%UP<45!H z=hCe$L4#}4+WX-^UKP+%LTzo-xmlT_IX!D2PA zvG4Rml!rbnob$@!Mw-6r=-cPAb5K=gcr9v(wqmm;pR?^#ZXUCyk^gwXH7lak_zHLG zfiCy3s>=!bV6{80#tEX%jIiQ*+Qo+^;({Y#VJ<(isH^T)rrh^+^T^dt%_bxh$90DB@ z*H-3H6t{6I!lte>+`LA|z1?^R;;|NaQ4aKcXrjnlyXdF60HhezJ1vvaJ5oI&QxSg8 zG0ZcZlcEwffld4`4%YC~lc;A)RDf8Y35Jc$+++st8huHTwe22wn!Q7}q)T{?biCYj z8?6(sK1)CqE#h4$knS^Ca>M;0*@a}7XtNiXVf?FEK>r@!=2eK#{UMk^B6Vs0v#-K%Cy908P zDLqTGXO9TT_v*sWNP!$?_~=JF?<0QlsTC{L?jug}+3R#>z95+yzh1Crw9_v6BMmf)<7B{_C#`lmEdMytB`7 zz`BkFuKi4f2DDH4ou+XX&{f{&fNUf>6$n+V)hOhDF5W;+gcj>@XKLk+TrAT|SY@-w zPIv6v3G01Am{`CotY=|AO=I0z9&vNNQ%a9UAsd~r=~S+UQ)_>qysk1C3w~>>d9z3u1 z2$lP5!~Y|-R!Ag|OR7X@&d7(yJPsDMb@kWdD5#Ti+vWspVFnuH@DQ~ByBP*uVugM} zQ?A+85N+46$$BA379J~0*tJgUCSO;c*9z&JnzZy!`mDgbmfYOM)8-9ZxQ~Svf`9;fiz%ZGY=9 zb^Xgjk>(j{wQR_o_Y6j;-|K!rVeZRKB5tl;Tb9}oWA!UGv0P!^SYmA#f*7J)yahsn zh9K&aGoA+YCF5sV)~9+9vN3GQ0}BI-fhmC+@~VCb!ymWV;;{JaX`)nRw`q|reruhV zhYx{ldqJK`8F3(^zVEc1JTE#hqrHYWmitd_UQQxmkr5x_7z65wlh3|iZa|} zR0G_taxUl#+Z2opy=UHPi#VnLsmGmrjuE$X7Ast`tDK7!;jh-2l=^@X5&*iq3d8je 
znJN4a$Xl(#;-SEA6OasOyk*hPgvbm zO4Wp$9#Qbf;^}ZRFyDho_@%{licnaD9K0+Plkzz5Npn%26VB~^tU5hn98j``&w7b9 zc6?nr`&dPfR5(vB2Lvk&Q=B>F6wl2ze6awd{=9D2K~0HO-iqo;QOy&ycS^ zv69-(db>q;IVhErB*%ptuH&>}!&+~mtNfv9|1mv(I?us`G^26-qQacWQ=icK1GBvN<+n`X-O-)wHaid-~drR{8y?m20k3S2knISs@CZ{F=?`_6f;6dRq@T z#$8ou5rY^otop!x=y8woz4rD=uEyJ?h2x}PyhfnkJGs4}3-QUyBX1u3g%|oC~12T3*JsF0@L{q5& z`f#0fh{9^W<=CZyc&oQ=p>=u>UsD8Jw$>AT1=PCg8jE(h8M%u`V&jGTP*4oezyMK2 z+ft!n-&hP_Oe-?sarYDkG)^+dJ1vmbT}9&<@P^KJTzwf}HKgtqVE-0yO-#Agl;-(x zu5EX&CT?H9bc_0J4~|WxaglWE4a{f%DEelN>g8N+T<`&`MJ zElcj>vA-vSIQOJc;y}nWC|b{UHU+hm!^eU{f035j19|dFA3>wGJGO*f)M7>cV?dK= z!VITXwENa5?Q!KB8{J>}14`Fc9{WHdd~TE#pF@u6?1mh4IbOVZ_H#-(&dB^#JUl(N zNU<(s7QgUwrDR?2v}>>KxQrs(qXr82OMhz4=dq+D3Jaewif70uwR$f zmIXHyLA|>#$B}XObDrV1T(9h)lY;pr7^DqKj0|wZ{pb{dKn;g8mklry2gvpHy!p|f zJ7)bf=k?UdEWE*_m=Cj^S*~+?irKcH`0#FwiHPR~Q2ibs`K%a~7p|wKq}?Adxcl+5!=(Yf zm;6kVvFfAZxd-effdgFieaQq{fdXR;&lNS)0VmQhK_$QS4 z(_6icasA`T1%D!K{fmx%%ZCV#34YTMwjct*u4TV?S(=rDkHmx9kKDRTnARbJYIJ!H zix_9`70pVqAyO7|GR5sh6vc{ZqbE@ss#6yC(uhu!lB9uEj$=BQQiq)gPn2b{r_Uvd zl`!zlZqD8o7Qq(!;#@GQ4LOf4Ucy4kf*Kkm);A~jcF^zxONY@qkDb}jVBto1;MSAC z*a1(_22OlsvO(MImd3@K!=>OghbId9%Qohq9+xx|%n^CQ^ra^a_1QdFA~l@+W>F** z#iOre!zN?VRBWdq>}diWeUTbPfX2+j8Lb9tq(q;M=Q#*~ITtyX{073T#d(-$ttX7Aj-XciW-oDxLZ{n8dAAnIvgKm+rIzN@ffqS7Mb zdEno9q61=Isnt{#kDGkf6jApG48A%UzYdn1C=>QLY`qOEcrU534V4*qa2*8ESix{# zY5uE_)db=glItKXC0KW;$$;MsOBbXYp=!fsRSot;5ujd78{HGA$>fC~YKhX?~fERE-{6j!;4)VS)1_o7RyZiuf&@9{PhTBLVJO%vjGk2`zjX2{8HJu4+ z6+>in!xkfbfJ7hb@Q(P!ZWVv3unC4qAZBc|<{B1%@8w{<&(cOWH5Au`&m?sUR<+64 z3exZ1Hp;eO1TWwjjV59YnENs0r^zasFX<0g#rEieUrZj917X1fQ7L0{{Glk-?jl)`BiE(b?V%ezouhmZ(xOlAC5}BR%kD9kl9G4n&^4 z`K@jLh&RE!+dZ%$gLjVeRF1pVk~iZ`i;Sly_=d|O`D5!V!DzXemg>Y;YV86qBqEE> zTf_%^*VsBL zJ*0{`>!Azb-;(K?TyFufICVT0@}6FW8dlwP5Z2E93VZ#xW$XX))!$5r{!Ogt|NMpD z<2!TRZ_a^JP@PM@ZJL>_bt|nkQM72#^+|n&`N32@$i}C;*<~8E#5(f2aAhe;y86tN zbaSN5>Bj$_`EPIhcO?9E7Toh&R^2B3oH^L>=(^IDki95-#0S356^g5Qc17)Kk|7*f zd|zjL|C_8%zX{IE^#=B0#I%&fB-Kv(u%}XK{|Z?D&7U`1B&r9ho4B*ii3p_5y{>pi 
zD`fV8UL#bHd&va0BIf}w7Kr=gg2I>Q1G^$8-%-nxqs7u&sQ4ICBSDaI=_=)ttS4@v zK&bie86LkfP;e`5X{_9KO5EwSHN6b}by3j6KU2~(pV9a3L3IS?|M!of!u?ue;z82) z+W-(t=$J8QSgrM1#Dffd_uaydbX&#f=I zrIINnJG=4qr@Zq&LQN-LNsMjyW@52XQ=ul`y13`#EU!&0Bn8Xrg{8CqG5^iDunTy) zE91YdXX6)Mc3!#OwOAG{Jqu|GJ#y Date: Mon, 7 Aug 2023 13:51:29 +0200 Subject: [PATCH 11/32] Analyics and performance documentation (#6594) --- .../analytics-and-monitoring/_index.md | 6 ++ .../analytics-in-cloud.md | 54 ++++++++++++++++++ .../annotation-quality.md | 29 +++++----- site/content/en/images/openmenu.jpg | Bin 0 -> 728 bytes .../en/images/performance_dashboard.jpg | Bin 0 -> 23234 bytes site/content/en/images/viewanalytics.jpg | Bin 0 -> 70692 bytes 6 files changed, 74 insertions(+), 15 deletions(-) create mode 100644 site/content/en/docs/manual/advanced/analytics-and-monitoring/_index.md create mode 100644 site/content/en/docs/manual/advanced/analytics-and-monitoring/analytics-in-cloud.md rename site/content/en/docs/manual/advanced/{ => analytics-and-monitoring}/annotation-quality.md (76%) create mode 100644 site/content/en/images/openmenu.jpg create mode 100644 site/content/en/images/performance_dashboard.jpg create mode 100644 site/content/en/images/viewanalytics.jpg diff --git a/site/content/en/docs/manual/advanced/analytics-and-monitoring/_index.md b/site/content/en/docs/manual/advanced/analytics-and-monitoring/_index.md new file mode 100644 index 00000000000..3f054436977 --- /dev/null +++ b/site/content/en/docs/manual/advanced/analytics-and-monitoring/_index.md @@ -0,0 +1,6 @@ +--- +title: 'CVAT Analytics and quality assessment in Cloud' +linkTitle: 'Analytics and quality assessment' +weight: 14 +description: 'Analytics and quality assessment in CVAT Cloud' +--- diff --git a/site/content/en/docs/manual/advanced/analytics-and-monitoring/analytics-in-cloud.md b/site/content/en/docs/manual/advanced/analytics-and-monitoring/analytics-in-cloud.md new file mode 100644 index 00000000000..4bc28cd8e26 --- /dev/null +++ 
b/site/content/en/docs/manual/advanced/analytics-and-monitoring/analytics-in-cloud.md @@ -0,0 +1,54 @@ +--- +title: 'CVAT Performance & Monitoring' +linkTitle: 'Performance & Monitoring' +weight: 2 +description: 'How to monitor team activity and performance in CVAT' +--- + +In CVAT Cloud, you can track a variety of metrics +reflecting the team's productivity and the pace of annotation with +the **Performance** feature. + +See: + +- [Performance dashboard](#performance-dashboard) +- [Performance video tutorial](#performance-video-tutorial) + +## Performance dashboard + +To open the **Performance** dashboard, do the following: + +1. In the top menu click on **Projects**/ **Tasks**/ **Jobs**. +2. Select an item from the list, and click on three dots (![Open menu](/images/openmenu.jpg)). +3. From the menu, select **View analytics** > **Performance** tab. + +![Open menu](/images/viewanalytics.jpg) + +The following dashboard will open: + +![Open menu](/images/performance_dashboard.jpg) + +The **Performance** dashboard has the following elements: + + + +| Element | Description | +| ----------------------------------------- | ------------------------------------------------------------------------------------------------- | +| Analytics for | **Object**/ **Task**/ **Job** number. | +| Created | Time when the dashboard was updated last time. | +| Objects | Graph, showing the number of annotated, updated, and deleted objects by day. | +| Annotation speed (objects per hour) | Number of objects annotated per hour. | +| Time | A drop-down list with various periods for the graph. Currently affects only the histogram data. | +| Annotation time (hours) | Shows for how long the **Project**/**Task**/**Job** is in **In progress** state. | +| Total objects count | Shows the total objects count in the task. Interpolated objects are counted. | +| Total annotation speed (objects per hour) | Shows the annotation speed in the **Project**/**Task**/**Job**. Interpolated objects are counted. 
| + + + +You can rearrange elements of the dashboard by dragging and dropping each of them. + +## Performance video tutorial + +This video demonstrates the process: + + diff --git a/site/content/en/docs/manual/advanced/annotation-quality.md b/site/content/en/docs/manual/advanced/analytics-and-monitoring/annotation-quality.md similarity index 76% rename from site/content/en/docs/manual/advanced/annotation-quality.md rename to site/content/en/docs/manual/advanced/analytics-and-monitoring/annotation-quality.md index dd8cee14549..a3d613c06f7 100644 --- a/site/content/en/docs/manual/advanced/annotation-quality.md +++ b/site/content/en/docs/manual/advanced/analytics-and-monitoring/annotation-quality.md @@ -1,7 +1,7 @@ --- title: 'Annotation quality & Honeypot' linkTitle: 'Annotation quality' -weight: 14 +weight: 1 description: 'How to check the quality of annotation in CVAT' --- @@ -187,20 +187,19 @@ Annotation quality settings have the following parameters: -| Field | Description | -| ---------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| Min overlap threshold | Min overlap threshold(IoU) is used for the distinction between matched / unmatched shapes. | -| Low overlap threshold | Low overlap threshold is used for the distinction between strong/weak (low overlap) matches. | -| OKS Sigma | IoU threshold for points. The percent of the box area, used as the radius of the circle around the GT point, where the checked point is expected to be. | -| Relative thickness (frame side %) | Thickness of polylines, relative to the (image area) ^ 0.5. The distance to the boundary around the GT line inside of which the checked line points should be. | -| Check orientation | Indicates that polylines have direction. 
| -| Min similarity gain (%) | The minimal gain in the GT IoU between the given and reversed line directions to consider the line inverted. Only useful with the Check orientation parameter. | -| Compare groups | Enables or disables annotation group checks. | -| Min group match threshold | Minimal IoU for groups to be considered matching, used when the Compare groups are enabled. | -| Check object visibility | Check for partially-covered annotations. Masks and polygons will be compared to each other. | -| Min visibility threshold | Minimal visible area percent of the spatial annotations (polygons, masks) | -| For reporting covered annotations, useful with the Check object visibility option. | -| Match only visible parts | Use only the visible part of the masks and polygons in comparisons. | +| Field | Description | +| --------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Min overlap threshold | Min overlap threshold(IoU) is used for the distinction between matched / unmatched shapes. | +| Low overlap threshold | Low overlap threshold is used for the distinction between strong/weak (low overlap) matches. | +| OKS Sigma | IoU threshold for points. The percent of the box area, used as the radius of the circle around the GT point, where the checked point is expected to be. | +| Relative thickness (frame side %) | Thickness of polylines, relative to the (image area) ^ 0.5. The distance to the boundary around the GT line inside of which the checked line points should be. | +| Check orientation | Indicates that polylines have direction. | +| Min similarity gain (%) | The minimal gain in the GT IoU between the given and reversed line directions to consider the line inverted. Only useful with the Check orientation parameter. | +| Compare groups | Enables or disables annotation group checks. 
| +| Min group match threshold | Minimal IoU for groups to be considered matching, used when the Compare groups are enabled. | +| Check object visibility | Check for partially-covered annotations. Masks and polygons will be compared to each other. | +| Min visibility threshold | Minimal visible area percent of the spatial annotations (polygons, masks). For reporting covered annotations, useful with the Check object visibility option. | +| Match only visible parts | Use only the visible part of the masks and polygons in comparisons. | diff --git a/site/content/en/images/openmenu.jpg b/site/content/en/images/openmenu.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c3b38dce404078e34f216614f80516b0f4853ba9 GIT binary patch literal 728 zcmex=``2_j6xdp@o1cgOJMMZh|#U;coyG6VInuyV4pa*FVB^NNrR z{vTivwh= zDOELf4NWZ*Q!{f5ODks=S2uSLPp{yR(6I1`$f)F$)U@=B%&g*)(z5c3%Btp;*0%PJ z&aO$5r%atTea6gLixw|gx@`H1m8&*w-m-Pu_8mKS9XfpE=&|D`PM*4S`O4L6*Kgds z_3+W-Cr_U}fAR9w$4{TXeEs(Q$Io9Ne=#yJL%ap|8JfQYf&OA*VPR%r2lxYE z#}NLy#lXYN2#h>tK?Zw2kQ`#dB>7~SJG?!L0c*Xh#Xz{&3W=H2sH`;{ZfGwznt5}k!I bqDf1d+gM~0u0E9r;5_-&_GQKb{{J@tSnc#r literal 0 HcmV?d00001 diff --git a/site/content/en/images/performance_dashboard.jpg b/site/content/en/images/performance_dashboard.jpg new file mode 100644 index 0000000000000000000000000000000000000000..174aaf066d6a2b8e7314186c24ebd6ffefcafb47 GIT binary patch literal 23234 zcmeHP2_TeP`+whAjBMH0v1G{-vSnum8B54oNF{sSl3fUc5*qtbO_mX|rj%WxQlw%M zrIHk~6tX4z{~fpc_1$~x{_l6Y-S2+i*LdeR?|I*|Kj%5;{C>}w)$Y|kfYA_VfCE4f z1ni+dVD%;7qI+b&3ji1!0}=oLPy+~%8$d!n5c+C@Jpb_HKrsNm77l|R$^wuJFhXAw z=u7$7Ltjdqwf`Rb_PhH?yF2>`Y>}3cRsvR^0XhI5D?2wk3m-Qp_Xd7GAxT3SNpW$> zU0Pce4S(37N`r2!Hi|QJxD2agl8#YLa zO74`A*;y6n7FhK!e^wg-CTbWSu@??91285K&IGPD0sK(ekx;8p8s_5%guxL=DirlP z8d~Uu3Pu11!r?Fk9En6Aptpme=Kz8U$-F^Ui;Bf;A4<@ZRW2+sn_5V_>^htIgLlI6 z4qoBwXxKS8xwu6(ii(L#C~Q$wQdUvb*{X}v(>E}*u(aB-)7r+?(aG7x)ouR)Zy#Sj z|04l`5yy^4o`{N$NlH#hO*?h^4Dnn}Zr=I)g2JN9SIR3YtEy{iZ`^FUb-TIcPHV@* 
z&aOv~yPrIL`Ko_l@b%E}$mrzM^vvx0xexOTlzM>x{Ksnjqh`OT7ZX%37y?qkqjAj5|#JtHoUr8U6P>YGq)hvC^17#kV2P}!_N zdCKRmGi%9;0Z`iCoiDTjT+|{-#hni4M7VMGu{U^XY$bSf$uChzzv=LP9#2VdNizk6 z@2S%}@D8aNaQ*qS@X$Q{G}-1WKWGXi)Dj4=pkS4p0=l?S<0@L&=soOk{|zR3Y{F?~ z2CtL$%B4DJ4eJ@1B6*7Qi5=tOib+w_&gP`>V0yblno7*$NjSNJ9m0gk52i2!Kl0T# zf&@WSJb#l01~X(_ePT@MQY?X;CCx!*y%G4P4I=@#1zjn)9V=|lStWN|c~t(z1)HQN zP8l0}Mu%mmGIPjeq;+Y}?PyldXyL?8&*K0~(t*%Rx`W5)`if8OMA7B00t>@1wUZ}L zdUA$Jd1rKtsp;kOHmIgvpcSz2#CVvzoDM#cQ@}h(L>C$l9?`&s-bhn^B=$&w(~1)o zNqw)3Xnk;BV{C3l%O2TL1I-E_>n5orx@(MsyD&1EH6&`Mt3+hA0m4PUf10&GO@EGU zbGM^xYw47lVVzC)Qum4_C`pAQDCBwdywux%I4}{{1^982XmC&YD-lz-Uu;&MyKgHi ze(K(knqPmV#C-iTrn?tf*axo;*yU_OA0}s?vflILeOQNIX6iwr>0^Q6+=3>6^tG=k+7`V>g+5@Q~IJ{E^lLIW3tX> z>tocIsJ^A+4XGwAoZ)(#TJqQv2F^-ckKT}&huLs2lV%mjOjSsGBdo1{x!|@;u=sOD zVdB;u58`_h@*ED=Hr(qh2CeL!6R-4SrYL#l>Kp`WJc=H`8a(Fa3j6Uw6OwJ+x2@l2 zSNyO}G$^65PWrs*y%4LxxZnf6QIZ!0L?r>t@r~mpYC(_12+fyH_ZYth+5*wUYN^}r z-j07z0pnrCXINUtt$f$hO{3>yn)kOGS*2Cr91IT1-Z4G0G0sVIhmJ3u2`hrnDW7X* zc8gQfd8>pW2^rJ@!yPETh%W5{Gcp~GZTOAv`usPiGou(Jz{OWy4xIU{b1n_fBJ}9j zjzS7h!&hz!Cce?7ME{%zJAtLu1z1hN@6PXO0~iewP;?QetsX7Xzb_fZ%e5Yd;0D^_ zEot)3Cy84gzo~niCoW`xq2RW=FUCNz`D!A8 zq244o-thF(LK}EpSt?8(?TTIuA|nirT49Oew9*Jd5iDCnewyFxp~eBd%X$e_X9RW# z)WQ_Yc;#;gF`TFXl*an{53(o@iB;21Go{bHpD>py+I%Yfg^>LE!nV*1K#zAppYTc? zSa;REvFBQj==`hH5=U9dpmd}uX}~p{T0^4%EH2a&6w+l);&^`CIG-nQI^BP^aKjbr z(~5I`JeRHG{2et=6@WlJ4kM^0W_qt55D-!ySO_$yWwn^PBVZoJb>t>qww*8YGzr$W z!gJ$>bP+hZ9yPrE&3%KzV3^;0hdv3&o@`tKmfc_n#K4B-K+M7nU`|FbbX+AQr0ga! z-^;s_+1-+F!0^5&N!^&he0pe_3eSBCXtGHO&yhrFwDeD%r`?{g$?#@uzAMj(YhK=1 zhM)=?TlBsgX- z0a!i?98$&en2>&UgJ6YpSRM&+Ukcc`-wl-ChFwRr1&lh#Fc(ODDSUtU?`#CP!ms+? 
zOpNZES9BH({Uc}2)sKo?3rZ?5R0QlXz=uJaohP{ zxxsE}+nn?T{T+FSVFzNifZgHUXs@PKKov;Krt2H%E7flfnE|(Gm-e_zc!$eABLc*8 zy|><%_BOo4GIx4jK~FRaRjP)@0tTk*>;~+;V{-`}q1-@bcJIJjq`%X2oHA)$VZpNm zrGham!}m?Y&-YA>>5jZ3JVnc)dBEoT!x&XtVEb8eh|f*+9~e`61Q{8u`!4`$uV?r( zw&X`s(@Tg839zH%ZGlkAyRo7+_RLM{jX12iT&SHXB1?(1G?LKjOnnBgxIxo+f2CxDR zl!UxM_#^y!N@1Ls_3{89y)XVz@`q;f$8F@N9sPb?sp^6rRkSyxJuYvelPlyqW>Emv zarliDXW3B#ImrZ8bNO_KEG&I@UDcu8z16QXu+#vrxEm;=0XgRs0DKJGm-7wu?s2Qd zA}S4}DxSNbKqiJYh|Zz_CeNJe3`mbx!*WT~O=652OdA8k%zq{P-{%5T_%4bdiK7U9 zBv?G9=Rk|r8W~Av(?gB(&ikqzJL%%O4pX`x*b%wT@krxSHcQloJI}+0_1+M#Ob4!S z!*kKpY8PGi7i&fdW~~JX=|@G}6@9(w4qtv@p(Z3KWTYA0;(DfR8pcD8EvU!nP{WF~ z7sX4Yui06v2&WBPqA~HTp4DjJu3CE_7^%X%Q&{4*iFRb0N#IQX$bI9raG0Eo+k8E0 z2n%p>vL5Y?m?j}GM}z4(RqiwoeYKv5vTHI?;#(S1R-VxqhS|FLN|eay_%xDGW>}hH=}Ne*7}gq}EUG;IgEn3)FD2#}bMAl>%gw&I{-p!6fY~C;(HFv@ za>zyp*>-Z*C=k=D7$X+U$6qBSTB%ksMGXgLWqL) z3Tu`-{oQ>Z5k5#=pRL)(hHytGX6gc}P+G*&*g0$d?6o1Wme^n5tm`rB`B+4#^k2+S zPl+krRH4+svWnkAzrkg9(XO+dD?X?z(p^&bM?+QNPfi^u5k7<3csgD3q}3j;b663t z=s1S48jLNqq?KZ|k9Mn;R#+N(x@SrZHfX-+7&0;LTpPFksO!ZKWaLv)mDeT-=Gb;o z0oTm*tVt5>?2dIrgBo9qxX8{A0TWpEBAz5|I<&X(rsz5rEBfZnE-$9Gy+u4{J0G;5 z2N$wy>W$H1P(WqV83uz9gWI>n?JBG`Ix)9!o0kCH@Wl3q^#;1N=iGB8TN=gDzCe6P z&9WG5*6fEuTGc@o)HOMkg`pW%Ft9MVZ0^zxI<%MG-FAz%P%SLS0Up=QTp|%in-j@o zM3>1}REmA#Eu@Iopv(B-3I4X!n<*V7&+jky&p+@OR47*R4n4BsV|Vm?s+OApVPJ^p zRN)YKe)^RR+q7VrWX%v^_WcO>j-gj{E-bu8x5AKb)09gO2t2UN;-eJ&5|vi8bzOi_(G8ZAz%ojB+MYO4D6_MQ7q9Zo_)GQdle{rv%?KxW}T*Y++TUG&4Tka+wcnFN&02D4wV}PcNBop z_ABd`vn7)T6cRTbS{c66m_y=bOM7U0UpV&2Vv4M#i9X<>``>02Ue_6Nuhw{UrVr zN(h?0?$s@BejjnRsWqxoTugt|`{6qhy5X&N7=fg>`t3Ld1-%NwD4^BsuK`O9kM5m0A8&KuT0IX9|+d zq0SIXp#0ZS^cG`ha+5W8d3Tc)%%=Gl$L1fZ6e{q)#>_pNKJSfP|1pS!nvY+8zci9g zMn2QFl`{~{oJk9qieK!~k7gIAWP~^osM7vbCHVt|{}|%0jf$%+LilQ>vrPHyvDJ z4kr=_H^coS+WD@f=3rS%qAJUyU0ON3*z`w|^x6FIo3;fXW{pM1SDbKIkJ)ZphGi=s zaH>)<56d`Xxe8E=##H>q^5*{=ZC^7|KNmOS7VoVe8{`*DE^2+aa9>cTKhu0-A6*LNI^P z4}YiIFE7zk*;?68#y+8AKs~~X6ty;taCPg9i=5@%S%pajtlhJsR(upaKg2_u_uQdp 
z(-b8IArKTL1>p*iUW1?mNUuS#6-BQ>+kQx|ks!UdUxk84Kza=V0DczblWb(r!*Rao zP9hRb2K5vGv-^PEl~uqpwD(-Qrr9bGNEFWkX`w zUiOWgua%nrWFs7A(#}u?=6Pi6SYtm#J-CcOx^ys-VZaU05T~W{Uy21{ z&OTdgp3a+kv3NeQ*3Ebf+5vaWA6|ZlonTU`nu{OWHrOy{3?7NDZ@3z&x>srysCA=l zBK9OP7${(A;MGhYEaWn3SuFh5hqjQhqsBL1OhG$^wJgB>b!#~#FYgMb7fR0t(1|11 z2N~u8%%MIV3bjIo>d~0m{m2Kr1_K6Ae!H1EGq+|vS3o{+_)%Wcx;e>_2=s!LpzwC% z9*tE%NK0@ow@03m!^ey&c&>m`bXj?D!+}()`OsR71gdqr>EW}B4SC?tJg0sPF;lBH ztMCe%)Y`0V8*%UKV#HMbKD$iP^v0FGrGiV4?}gGFYEU6_8%rpVC(4h}F!ry8EriVL{_VLQ()#cU4~ zG3l3vxOhBU+J|ldwgrOmcab^K9G(NgJq$e{mR%yU%UQ%hB9~A|$l9Yn0iiC+AR|eD zzz~TS{%0+~QZMy$0}rFKu#G(>H_^w@M8MaqyUu%%Hy15Nc)5#U?>GyAR|eV~4h|mJ z4cBx|%?@?orMvVWX~9d}WW3`FpIsu+`WkJGD`984l)x(I$d^xrm$nKxHPHP)TIbq?HryejQ^E9b=Fumi==Z@;?p(BrswH1|UKFIpg(rG#bopY18;T9P!6%|Fx+0 zuc498mE{NVeK*@a!NG%L#b_9N8(xqZ^F!{kiwvmgXyIT2)-u7Wqr@ldz2x|O5DC{1ysHY;zBkLFFv zfD>{uu-85~UwLs~x&vO?mCeZJ8gBW-`MnKBjw4w12N4C%DhMa;+)DEZ_N`bwLVw}x z7?y)sH?-DZu9-i>VjCBmKA-ixXV{Y6P$pE?2210Uif^NCTU8be=k{~H!f&?v;iO+0 zt6BNwRp7|v!yYslNhC?-2B4RUbcXH?2l~B-)Bx7HMetc-Cvy#_dT8A;J+Rx~5-7W^ z&mU{K$aBJCs*)z-UWh%)Jq0`*IPoOwELU^I&NPkyXLwFgV&(}{f*b6*F9T!N>2Pe( zA(tVP)7GOeHi*KNM{WVT=@>jS8C2LZrJNF}+x!|PJ?rrD5QIq5H#Ja(7Z$nPkGA(y z?8Chmt}=qEzlqB9W&%#vGN!A;x(BhFM0LI0y~h)x4O?^Hsh^UQG1YY5&~6rQ2-I0J z-@@Ny46)y?E`w^|h4z%SyLF0KrWN(skhiMR@7@@*+7~ezXo@7}N)EL2Sg0Re1#Ug$ zTrfpGJtsNfaLx4;p3$D!3-uDq!PYfhK7>NOF?*K1BccxMmeovATrBdUugOz)|Ng=OVG zN`k$#uh&QS6Wda7lgyW|U`vl-h)0I9&T3#%WfY3l+Nfr=F_VYvorU{R9|FN%Q2PrRKX5JOsR9EOSHYO%MR4w3eyYC z%#_pYiCtv5^az{x_Zn)M?%lS_7^!}Db(Xiz+enotaXp18o!=~`Rj$=sz}ae}h**b_ zg;dYXe4Q_e-mXi=G0D}&*;7%UDVk<7uF-dajJ##v%B3$bMLl6Trnh9+weMiVsjz8R z`nb#oHJVIh|7IC!l2~3ViC!?-r|qS;HxO5}U$C-&^gw%3X4VC; zE#_H#VX>xRU`RPib(6d1`I33@kp(h(SbI@w&R4)g!&sk+3Z-Rqf7~k0AcZ>4>aK5c zrcChq3%f|VTAs2zmripc055}LA8^~4v|pB-Mh%yQ@&KReY;ifLE1jvW=>y0NFx0f8 zS#}n{PCZTEjFdz5sI(gmT{Nu2GD;>Pk^{6`l5VZBk&>SIK2kDmDE7I*x4$#XesX5`O7lF)uO1$;!dD*kYmK;Gd#+l z+D(WFy}+N;2?$y2{Nqa6;3L7Ctnrv@dH)R1Wg5eV12|bn0Ylr5mlz3=N4cQI4B^JH 
zOx0N+f!+X14Ye#R?F&aIzUh{hr8=kjv)UU*8)Z~|1NQ-Obh&=iK|9<4V0ew+5XZ5@ zR9_ZP+!-}Pir+_ve9iybZl5z4xs$|E>@dNzj zp;s2jnmFTlZ`0+R<*Y~e0jKV{E$g3Ro-DgD{AIx}q6?1A3mkegf%Up<7eusO$HMm- zvffOeRq!Xl*Sz1t`J~MMpzq@x081D`@w&kxi(czHhcUpi1E}Ast-pPaNZWF7Twa)t z%O?G;T0CUccI~p{j-qjoB7E`(D+^=~`TF$DsiU_H)A>!73xe=zTB|_0NPri&ern|^ z;N#R6VyP|NP{DK0ZTtA`v7$f<_D^dsxwqxc;bS5;PfVGfiL&uNqdn!T@>fYNVCwoty!=GvjOYd7EdAHXCdp|#${lNzK>S$k<_4mq4`l08y*%VRv5e=G6SelRo#g082~8J9WP6=Gac_GiyO^$e`O&8ysQ*4F6J8I(dfcl7VZuk&B*Ey$*b z0^+X25T#$wdq;olYlwO@>%}wtQ!d}S|JSYM{MB<0sP>>2B0Go>$KeK18zf}z>;L{+ zhF728YixQxrly0lOYeIt$?+m?Rs*n#mX*0txs=gnIaS@Cbr&K@sh>1MN`;=ngX1}} z4!2Q;v(|hE85xv5=vFgYe&&K4jV=Aw|5vL}bPlxWuxy)Mh+&zy*lB81DNW&;Y+aQp z{1auqgZLrdVSvI(LFCGhVQX2z*4~4Z4QdEdKoBQ20x28lWs0(in5HP3uZ!~4di*~& zfekf1Lak+XG9xdDM7%_CXs%1U= zO4HQO*AnULKdrSn!LGE2LALiib@tukyPLy3?*nv4COZA{6o5630U9`Ke8@H|qX}(d zIe%+DDQ{y;$S!F4)zwP#o{&%BZ0txHIpP#1XQ$YDx7LpjwZ^!%-whon)?kBh78K5> z)RgZ*%2L(JNO20P+!;gRcMc4Kzwy@1r*c_otHARuUikNfg0f5YhMDX0c9BdDMOO!{ z;L^f>DjEG6QByw*d@Sj@wa0iC3XKAZ5QRpu1=eVka)3e?L5ms+jRG;Vzo_EZM*NQ* z;@_y?u=E&i14UeGAOG3?chP|hR;|FfyZi#!XY1y@xo%9}xwVHx6Xxl`t)PZik<4`+ zebI4&jMP4Uem-aZE`w)c6`HVe)T$P^$QoiTh1Rc&7aH&L>!T^dvMLsfZ(_0%b~bWa zn9>Vuz%n&b9|sZkK3hb7PP+Wu1_{2gF}iA#O!Ef4jag6{(mnHf`%yGP$~*cwIUz$* zz})nB_vv&->ca_x%KEC2 zuc%;X>QRkc1-2Bkf;LQ7+?$l(2)-UG;-C3D5#l9zC+K$eIN({=s2C<~0q6wx?=Zmr zt1Xm%fxMgZx8mst{T;;U^5p6=4TD>N<~IU!|5TLZbBqn*KD6jq5_bbJG|OK<}_yPOY>*?V|+1d-u|V&H!0q6G;FavmBKOL!QO*ML){M~<2cvV>Ay zlk}YFbIkL590E>!P@;o=;zDF?K#BlTTXtJtLaCY11FA$W>jY{M6NdFVScdZxyBFoB z9hKihb}{ZemDB02`4pQfR)BIh;L7#M`p;tbqb-unfGMREP{zik>dA|oYuEpG^GlPdSZ6H=xmOeAI&>?^d>rVw%8CapQ+1ecxcFf`5}ke zou{778E=SF?6Ijz zz3ZBd$=G9Q+19Is&I}?`1vpI$DKTH(oYw6y=%qyGw7dz2mnYhU%l15(O2}v`1s#)u zl_z%QoOG;9b8#8(?hEvs%^|TSZGTQbEr!1nen<9D4lnIHy)8Ldu7Ryr(sqTP&7SAY z74{!WZrh}d-(;hD6?Iry*!7m=$#L&F>uhXzfx z#m=IbEFVXWV93V&&%>fbJ@q{QbAOBElZiOebrhodtQq?xI1=J+URrX4YiHZAZ?f`N zw!#`LiNS2!FGD99Hr-9=*m|LAF-H_W1<_|0LWM-GF{ZjYWAzR~_CUmTVz1i9NH@^p 
zwM2vHYg8PR$lq(d@?}!(?^VvvPX!;@Cy>Fz6Z+X4x%1&uJZgkHaH<`yY9{^7k1E{Y z>d&6vr{h=0Y~(Doh!}ZM0on2U2)%JOS{iX?gktq45x&B#O=kJE&Ki{nX`|+{wCHOpU r_`O}ex68L5t7o{H^FDjp?XLg_{_FFg4uyz$ho7uz7yb#XTz&F?9oV!T literal 0 HcmV?d00001 diff --git a/site/content/en/images/viewanalytics.jpg b/site/content/en/images/viewanalytics.jpg new file mode 100644 index 0000000000000000000000000000000000000000..08360b12efb7eb181c4e521446d12b1357d21608 GIT binary patch literal 70692 zcmeFZ1ytN!lPBB+2*HCp1h?Q8JPE-A1b0Y);EijL#v!-_2p)pF(|Dr|gy8P(?$*%T z&+MGp_jzaDXXo4bzTNruOw;%D;r9LCd#kQg)vu}=J_BC?JXesBmjNIlApt%j{s8cK zfD{1v$&)|ch!+atjrt4~6$J$q0}buzGb{`&EKCecOl%zd=h!%SIGC8viJ#*U5E2m) zVd0XH5)+c*6A}^r(FqbV;u#cFbW~JyLTpTI!vDuFcsl^^8B!j~JTej;;0YcQG9D7V z8$bg9AfX~=`^SX;`GxcZF~_H9&(JY25e@2|1D+ruBR@ew{$tjN);@^u0VsH=__RE4 zo)V~jLZfpecCwxmxO8%ab`ZFgtFTbF$ zsJObOwyqxB(Ad=3)!hT_?d$(FHa;;qH9a#sx4O2zvAMOqv%7bCc7Abrb$xSt_Xk}_ z0ObEp*1u5pcXZ()=z4;Jf{cRp2VF={+z|yC4+WK$=PCXhRkTlz1a!Rq&j{bfWmk2e z)AOmF5SciQVh}U%uQHzgLE1l2_U|Js;D3p-e@XD!ANx}k%}LBG9N=aTdzJtPgy(#JX+ElF{O7tyynD%`K{&u4A9SkW zj2bz|uK1@aP^YBD!z|>4PUVp;`eRj^wAJ6%#Qd42-^6^De@&-w02D}+_h;HA|3o#n z&cA~Z+EAL>m}oJ@RY(*y?YK(?-affc5RJuI+2JL69D!@gdk19qmn!Qvt~poD`lYK%>?Mz71!oNRnb2%CZpa{J`|jdsS)1 z{IAzg@w?mqrhTB1nwf=@ITeNWJ#TYnKF=M+)^sFc0V8^8mP>$T^&2F4Q*@OoA@&2# z8|wUwi-v%|JRf<#MgRYC#0txxlW|UR@@8)u;vJ~`HtsRNdAn}cQ@$=sHgaQf=(-s) z$d#1H(SGU;VgHl!Xwdcl$&nt0)P#55Ez+Jp>8A`1>>KW5jn2JG6%PJXt1A5=Ae`mL zlYdoi*w^g4k4ffWvZqR|kZ{*D8xH}+?25&z&Mcnpl77u-e186aa?>0%!!g{=(~Bur14zrqs(QN2@B#=O z`^)3chD3L~i^w$%wz+?w3O!up(o7YCI=Ko=II0vWdYkck9O>lTDeJef$f`XzQU?hw z7CZ(HuH79eR2dwlUVf$HTTLP4kzwNgaXCa*DTA#Ko~kIfC!VyWmDdtT@|kAFE5Rqk zV*Wj&)whD>D!}fc|K2k%@rRogiobvA+Z@nx=TYZ&8=!=QT%c@lLHlL(%TpFjk9?1_ z9~0$Y#*F7g1_`e8InH}g+E)NY+HS5QLrH?~& zF&49H+`Z=nbvUm!&aY*S70QG{F6KiLbL$t(IE}YIW%MYzAE|=ao?0Qe4N(2J3)1cO zy%wn7`&9M;aVhV<6xeL?=2m*W5V%q&iIQyfno0A69AfLIUTa%Hl>0bt_?#*mgI zNGUJSsg%w5{D|gGK{EN=hfhoq4k$oK%CVIFFZH3H8MnjvS;aVTQay5{vsT}*nCp)r~nF*Jm9f_19HY7G9~B9tH!Wz2uTW>F&TyfWb=HZ 
zv^O$efLcEwWFBmI@)g0u9ONc>1F<=MKlo<;TYL0>SDp3*WP`>P4lqc&CEwfs_P77- zF{r&#iCE&Cgi-yNHQ061+dUm{p=+74zu2ufbm?>BBg+qArFW**)M&fZ>9TjOQ&OF1 z_%9@JFCikAwQIt$Ue)AU0Qv>XT&CI6|I@fjDd93$NKjNJ}jWYebdJhNSQ{3wp z`F7>y8e3U>{eg_jU)3E>tj*9I8JjI{LR7wIj0?1{6^#fQ2&Wc{+r4c7(ucR?=9V6?7Pf}&a9=IZ>h`=kGwMv`WesA@pc3M>9o z>;SRk6+iUmQ>pHV6*vH}Pu^v)IQtO)V$F+`KJzo<1a?&;1}qk110Nkj%A~S3CipP_ zW?iZ&dTD9AtBrEpx3_@JNM5{WBJ#;?Ldcsy@|1v#GZG=3UMpmA_sAArvCpeOm3Jcf z1wS!&$Y~C#$T#MV^tW%;Vv*}YO#?5AcR~Ai`{&@-`}xb$mk&Cp!$cnYy5lX1#q-Qj zVunCUcV4zP7AL)`tsw#n4n-pRyuSeLIJqHEXQRdJl<0_pw1}@YFf&T8-?BY#xP9g~BcejL}uHt=vSgJLd);>OKpM@9~kEelO;C?!9u+jMMgdnVV!< zi9+!}*MLjn&75E~AwY3R(sOqm$SJ^bwOk$!jC8kqPA9i!X=di&$9bbCNUK47`@G!l zZr;mk|G|?tu_?)gCcMeFZdDZMSb9`8rRc?)3%=FO5n|NT7VelE4DhMu(@b77o4O`W zb7p=f!OiT5(!E4Bdz_12*I&|Ie-RaR?QsUl+8xvEh-7>bPD;2>DhM%=?xU|P8AOFJbd>0{5ZrCSRJ z)CkS3B{_?}VH?B>{~#=?5bTQLx9&I;50*$XKABpMs@U98T4q0f@J&90#4ZD13Mynv zhC030*Xpwdh8

7BvU;F;ra)ALMkeJqJm(=dvq>f&S~*fj_k6(4&> z4lbqgXCq2p<#|;n)x1AqioOgF&=REhVZe%cPRzua*13#%0YTkQ{*oEz>}hGAX;pEk znRCWv?uq#|*DwX2Y0dvzk=a}Y!~{5Ej51R;zu*a`n0r)u7c~&WbNCy@XSumHVL$U` zE&JdQR)H|m5V9xGK3T24+HiW&0tD^Ns^~pFAcej0S<4^?(!U+69l-3F+{heb_ev>$ zilQ*={B7ZY=zR%!RHz{2ZIJd5Rg?wB#(G}&L$Uy*#d zeKt0?w9#W({@{B&zCEp|1L_*4Ea+Cj?kU&b)owBj?pU;(PS6{;DpUxlp#`UE&djdx zXsv8YdxClnRl2FkZt`+fio9hfVI(K_+BG(oN2MRfQ3ORIy>h$H7WEYCg@2-1y?oNb z=y^_vt9S!CxmsR*C@(B3TCx$E%Uf-0o#4u@57*sf`P!H?N18JCRwQsY%nE;FrI6cuS>>Fm~UcVU4QbF+L-ZW-xQ|Fyqf?dh#m9PpFOllsbD2=(Y`Q zDu@sxVi>UaAzw5liE= z=~uUcF2ULRr0}G2_b^X>skDFXBdw=S-~d=4$z#*N9}SBRIP>khW1cU1sI0;PVRa>VWG&+dOpQTG$iLG@ zF|nXQe33CtxdbDW(|9@F@200axeAx|i0^`DswcRf%daXP61c}!9jc>FY-w}}_jlmn zQn+olGI#4yQZtRXVl(Y-M9*EJ1;_;Xt?GP?m7|vo);c8#Iuv_f&Y7DkKBY~}-2YKR zJ^)QD@Ekjks_De~!#%ZgneWI|8wLU___f}XDh>@Tu)M4NVxHmX?)AwoHKMLbfrugg zfUXL4nfH4L^v(4(%kqgmG>Ux4;CGlspmvF4$_gU^g;7OMwZ1urxi z5+FZ3RPKpSmJO`IbI_kxA7%zUy=k>mOgs$@lUPQP3HU6m{oC6qH+zBJ5U$Bb;&V2) zd&%)87za257O4{D=F8b~11tgiy9!?n6U-Mc%%yp(xHmrQ$OTh-JJHpKkr9Pwl>SnF z{o)21w>GHWOO80I{0LbFvpQuClOnP6 zuEw1p+M>aHMA`*m)b-b~b9m`ZI-5@sUp2+mot3??32k7dNkp*)dD2t%4LLZbzRS$g zXCD#W3s;RdrRV-&^>0B=@yLD zu#P5tJXUKRHI;l7ku9dJO&tAN7lqOB1r-uj0hJ0pT4pQz8zKQN0{xvm=wY(^ch%=| zaY+DHX3lo^%Oc5DhT6vwaP6@pO&Y;*3=X#nW;5uu=X}hUq>MMX9}5c~E8~9t3bMciJKj2{yA6pK?mVfl)R#?t6qT>6O2HQv zEO`Zev){WOsH(R|;``p{%hU8&YJo`Fq0Si^QLGcuwF(N-p{y%0aN?$??(krw>3Pn9 z{5Cr8JQ(e&PYu#H0t+$~iYz;*y=Sc$nOAa1`we7JczbPU9Cv_jZ`~*qMjynZum4px zmG*5jAnwdV?YFo{I&$n&6~dgiXHzsxcYA1IggBYHjC`>{g!IT&P@g^hGDf@7&3gT> zpX3cweiaL@%>IhqlM%l7@p;qa7domujmsrrZJ&t_t=#z0&tS4`Kh}$Ggtq%aQrMRb z)UR?1dI;f?yzrU^p2VzTQ8R!I^|pJc@xn!;L{(Mut;zjJJ~V(c@U_FT=kNjsnkCuF zSH~K%!n4lQVa9O4r-dTlP`w2H+?(|E{qBi2Ewno0bp*K4b&R#u@Gm! 
zE7q`?9>~TLyER*DGCnxm+yn8-l#vSuZ5vUJs`&t~z~>kI-i|BFH*W|MAn3QB z%2fu=@gaA)Zt^EvEbjUAFWi?E#geARw@l;(4m?(5+s2VCVYmeZSa;{HqUT zc&2An2HOr7MQ}hBCmgVSMgv2Wgc&LaPL%z_9yGHK6d|{;P&fbyVYraUUNJ7i9;M&_ zzil`mBIosgJ=y(#7l%$`XnY4n(7IKKjcx+fCh9cXn$KXvbUKN??=(oMRS$kP9^aU% zpo2!#_22*qLUx`O-_hUWA3uU<5s0HR0xTbW`05S^NXEheD+tVIZGXqg5ywg<-AcWJ zku*2{AgMP{op!)RS(EL9vYNa7;F0$+9DrrD%5{ffV|@Kc^C%z-<0%M&?68)BmTy2e zlRkKV?r`U^yBQA1>I0onB1U(uxEn;k?j!FbA(IOwkN#^O#=&sFXGu&C{cb(C3hbjI zXYt5gM@n}B8TBu*zvOf}R{WP*%S+(^6GJ#)cti3A2XxaU`M2l6^VRQOg75MwM5=>j zM8oRneqA3;Rzm{9y=Ka!b^kQD3+Qhq^rag^Ey5u1U9PlFu@`uz`(*KKVMf**W`ro#tMu2k5gO0-xn18C+J{)lO41w1ElSVQL`D=Y5w{y)Ra@gg( z6xgVl_w_7k3!VeQ`uh#9rCAnDk5;eV*oxJu#ATgZ2W&(xDwFcusG&V zER^y859|-?{|K$$RwJR`Um8Xbn~Tdc#DnffPp)$m1u_b60VnrJ<<@*lXY{%~a>ZRQ*UE!{J&2Zz+wrR0vAr>K!}BDWt6Y(kOZzTZlbGLw=G z@9I8y;kq<}=Vc#dmS=CWJkNk_B{0}<4ok&14#-Ll4XzzO`{H}uC3&inLY}uyM+K6+ zC2R?i&i+tYrt)3L$p;mL%Yi|As)~2v_aUX5kPs_ncIe^~Kopr8N!+u{h$%cGAt|h9 z)u!Ornvb-!cTSn_QqmXVpky#LKA+Km4P~X()t86``Im#u*T@546H=g9+^tq`Fa*c~ zQ(+16z|^a1w9+F4H!l*@u#rgm3XrzHaGvmUQ1V=!e?{Reyz2$zIuntsanuHLB4$3E zS6Gq7@K7x#RB%(h-E}t^lu6xMkD4d)?VcXf&mH{@5azmcuc=5C~?3V!1+L5D1j#^Y}u3qCVq4w|+!$W?H>}pDGh;b+g7)Qu` zk5jc3+V_aJfoA_h&8AO+01oI-sE}ZVsf>xykaRZMMm=UzB^YrwZj;94;nlVo&nDA4 z$McG~Qd#F58!cj#y>+1{Q=anDEw~l*>5wM;;2cb-`Wp5B)NRyH_~NS3sl>JP($8shk{N(k_fN7wsXg1L~Xh{ z96$*Ay{7X^OlJq-VxkCm@)mJ7VAR5B*U-h3vLj~8w0^DH zaRQnA8P7G%LOf}GD(c)>E&EXlU(x%ks$;9Ai|5o;02wl`50=d}cnJ4Y;*9S1Qs)-O zj-raUnGrL{Kqz>w#t+1aqtn8YupoqeykCq^zG}-oj$9C-Yi#`(EIU|+E6`1_NMh`WBu9vexelry;u#YuHb;1cd@E*)dl;>a$^qLYa`ix~PLxYr zKmXVpU9zfRtR^)`AqukQ9>US|-!7n%B^tD9;{ey43WltzNeHWC)2A5 zo+T*msRjYwAD`sW_W|y*6jq?0CT*3eq^%H8M{domPSb=#-P&SiM)S|%5Dwc*$uI&B z8^y`mR#}zb0dsp+=pmLe;vZbQT%qFkr-!Fn;~ms&Ar>e3Sq}Bnf`|5my!g>G_}px( zy{Mt2y{ZP?nSzEiUG7(ovwE}p$?6@H_zW1`GCU2e%1`CgUQ-?kAF<&+H{?uMY1QL+ zyQutQdbE9=rL-BvlRiMjvEx|jGc57am^i7m;f0d3&eWJ<7+_3eIc_@0jnAs%IVlAX%mGOisKiW=a~~rc`1y zXt7d|sw*)z#T&{g+~3?VB*XJ4?J)7nS|gn?EIz41$#~Y{vluj;md#}qNU+m3HgYYl 
zD0hn1ef&&sPb4_MqR?v2<%ufCbP83aNdxvbO=IpN<5{*k!9|53%YFx+B$s zHb#|9ayC5~^QI*CN@jk>T1*%<+ZYUY6&GvA+;d!WB#xmboPas=cQIe!&`MrabcSP( zs1w-7XXQp)pqVX^5JU+A@ZL5~UwN|I|dP_IYMN*5jXBNnv6eF2LtE!*Q&k10T5{a*5Sy#)eLOF=fKN^7l_D6MK zq@R}s@)_Y$Q^dsCB5u#uLPL+dy0li4B@#tlX~^tRnI3-6Hz{yxH1=7USQ>E2Pe9Rwqj4NDqX?u;Z~K7fq< zp&qG97Rsj@LM&jXUXH_3!5E?$eb=wyrK9~g#~G(v+`s|=Zi?ejXr!3O68eo{$xZ(G z!S(sfY`;iv0sgKdOJDS>yLjFDLMr)Jhp4=K&L%eV z8d^kI8}Ysob=9d%5n5PnhO>0?y?PJ1^o6z%F*uo<{qe{xKq+AT5FM&LpcF`k)mi~# zrg`2Xw1yj|U)M^aWeHQywY(BM-yQEIDqiR{C-b{k^v*VCwuBn+nLV9r+b;ExeS7-6 ziM%|bV9#W4{=l8awl3OO1be=vU;$@iacCUs+4BYw?8F=bsl~pA(Kl(&E3}7fAEBx4gV|{Am2b_?FK{Aggi>JZ&sAeGFM?6%*AwRrwkdQ|5B7 zq&2QGNdnDLNZM^1zY@*5DS#ccRy9a+eQytoo525?(9CJS0|(SH)j6&{yxqIPrei(e z$F_bbkak4v=?R_%W^Wa4~nlQ(vqTtH6!ptMQ^Aw5{-1)t&>_ zH+v~H6wacOuE39qHn7Y#FBkp{xmS<$!C(lWRC+$y(_S`d3PnNRXIVPt=YfI9KY@^c{#&zEvT?K8-C$T3G zd(K|_lt>J$GW*v~jp>bd_)^yaX;Ut?LKXb66L=P&l*~u{Vi^tnB~9C{wGWaNLxcQv z9nxnEORb2&nEm=~=2NfUrmMsXsnyn}`-)?^2~viacQW~p z{<3|u>)%dddQ_IFT_di?WTrhN4F=BI3tz#*S20584D^l%nBBo4<`s-VKo}n z9s1Ip*!i!)0oB;+Gzh1Kwa}Ve`e8bR-zM#l zN|S#+rS>LOpV0vLyvhLj^y!mz(!$VYi0h3rC{^pM?Z8=CNnrL-46?BZI~+OPrwK=d zYzd^5nAL(X=6exNFCeB&YUk!4c!o3bbsx(5;JG@$fAc`tKHa;Hxa9twNBH(T$m!EK zn_Ru!InLmG8}}x=Y|40py(focuryC%X*@izbqzz>!eQq+^CIoZP>k&-?p;A1QtxAy zylU)BXS*~zG>ZA8-X*d0Ns5?IU&7{=(WC7XXpF#AWCV4JSP~xJOd$Z(KYWXuo127+ zB|7ca7yKZx+I(^Tz;XAm{kp92O$QlS$G)O8A=dw!(d6vGH~yzj_B+1>mz9;3I^D+I zRxr$VkkMOqP@dj}9~HMQW!V(k-fPwhN$4(Qx_r9IQjO~YnA}9D#jkgST;U0{B*tlr57H zIi)kiTmB<3Fs|deu)<|uNr8uIu~bN|fYv3H7pa&!CLvxrj6o&* zDc?8`xNRL&vIorv*YqFW92_m_sLjb$D9pVxRK|Pu4;x z_8R8CxV87@v+-o_4)7;nOH3#+9iDlzH&$~yIS#!Mz8k@mN(7(EWZUDIDyn|AOtK!W zZkFr((AY(Mn0Z?wq7r^whpnihegC4iC3TGlzr?dDtOKfifbSrxt@w~^QI~uW3j{VN z4;WBE+{ONBq+m#}?crtpB=4Ye5}TlmzoM%oTR<-v4Uf2z#LpdDL+0 zWO7G(9kM@1nSJO8Xh1Q0{^l+K3C5N?XPi~?=F7G7H~(jfDha=EoWzEk8|ufjkAWS# znU+(7%nv%&eQQTB`LYO<^P`K|%so+?0rP5HjP5c=@&k9y`H4{I3CRng0re%1v~EE+ z_tV-WKE;BCpr#36DMQk$HSVvxz@YgvUcz8Eu@ESFuZqp8Au*Jqu&mY#)1y3H-jPzP 
zuY?adb_j4A?Unkqg<|!pvc5`$IMuc+_xxkMVBD>vg#1EDBphIP5Ptz6W(XX%72j>ve8Bg>H{O#_o)@3Uny$~OieSMbUjKp58q?&Az*W-#u5-7 z7uUt{ll^HuT~|oRkZf>{;ZQE??Kk%qX8wEZquuJRj^p_m6@qP74y&IBf(kfCjpIgD zjkHTV>^qle>Q1JLRy}|muKEM_nstz>E|tRWGuE`qP>x5gFxa~g5mr4XKA?(9MCij zQ-$T*a9w`~g&3zR*EjYf@H}P^9AKyo2T13+xDKfx&<2EYf42v@<6w^$&9FsW$klKE zRVKv$$d16~!bPACJfQtj$Q@6KOwHfh;eC}q!&v|oA`dyq?g!_mZ+yHtig97zl3Iyd zwy)w71|BkeStYN%*^9Ffm8lCVrYF&!v~?|2f7<)Q4>c9htg87{i4NGv<*hsdRB&6FCtMP|ZGAx4)wh!9HR zQ>Dwr^fGJKJgso}MY7u3{lR;@9z59MN-tF88WCs|ii4~d;p)}|Q;ot60B zIoSUFd)8cY^gZZ5ZnRgi3cqrL#~H&_d7tn;VO=lmCp+-*VITH=>7j8Wevla4OyrWt z6wMeJt@B*&f>veSFs28{WFJ12g=vLnkGc9{y3OnSK(C;EQEF8yAt!>tD+Yg2zrRU>8b$XR1|um{ zBdO0W&6Bi(Z(59|@J3rc3rU&qT-H$Kb6}e2Y3?r)l*hipd z>_L)q$1z7MRpUCUZfyVa^!Wif7T?TG)ykwT7hTOnXV^EOx$8?I+jZ1xCt=EUfvUA( zti3bqOzcW>*D-@M-g~<+3eECuSI&)<&yEvgvo}ox(A2m6UMBp*2X6Q8-JfGcD~q*& z5Qus8c2&vo8V4tZI4_7$zG#r|ZDCTqLoapDVsm^inb->3s=F^ya$SWeGhwRT{?L@6 zmmToZo1bA>)UmqetaW+4YT8@qadNCKL(rx3{5*z^{R0}jCPy&07z+RDiO!$8(0zshsBXUvFBr`6wwevDsWwOrFQ zs7XX2e#EQg9y-b)s?crP3Q3*dUfWyi+(J92ZLVcwh+GNu`}OM$N&uN0LaMzrf)ZJ*ZGo^o5mwERTT=7!6T7uC@IQrYP#Uu21KM3cZ#XrXn|{`)wvD2}yW0UFcpc^0;@^%MF~#J`fS5ct{K}%Q#ALX2YKy>iK}a;Jbr7f|0v~ znW{)io7}BeIzIoNslFu+7+IMS8HBrV_)+uw=foIs%jt^Cid*08g3H}UP_~w2Skj@R zqcdYmoF&h0}^dbwVh^HU_4K{|6DZ}jDUYi-i*@>a<4fy3@TjuO}tX*Nud46_pozHcK9HFE7$WS4}-d4HI>MCK~68yh#{N9kR-~D+sAOrG9g1 zF>YTFs@z=5J=gC@2p%o&rHwp{^Kk02d5-fMtvHEtTHd{&*JG-5Gx^}`Yb+-ktz5@d z7x{2nXMHj;hvhNP*X&G!zM)wiKDSFske<&U2A7(`;rc5*Q7ewx{r!pN^e*d>m!Itb zN>FLWcDxlqvZ{2)*k4M|9+`K654HKM#On&K@?}$Nv$GMx(wmHIDW77N2z$M?+Ok${ z3^{&0S9}Eq$K`_S7`+>iS=GKm-JIl5(k+lN0E92SG5F_ov%yoB{2ci6UG$zRb+ z-eKdZi9|M-zc`cc$UZJWfE+1rmp#s=5g4YA#N&_`*(JAFm3U7j%f|YrH#}~_?Tkd< z>?kjL0@n(U$_@sr`>|?nZ3p<@{C@qr>YD@Qr)nKDd?YQg^{0syT_vK#HTqunGr+Pj zR2v;wVzepdv5c*3-0(@ZX0J3+32q4RWBlRd=HTEjgIKvvqK~WE(S-F|!uqneS0>ApNDmnwwG@D}c_ko$gokf&^(bo1DH@zySc9D09IVb@_ zmK#qk^JX-j_lzYycdTzcgN+d#8>>o@C5FIX2VfY2x#w zyG-$BC}*fmu>E((3G65W9g&`v@+)l-myy)_ev{l?AEOsJu$`K 
z!aH6!T9~1VnRH`x`qz%3#CzyOiz}P%-r4S)_~!AIcjrPMIiOl%r94|t^HU@o@Z2C0 zFZPYd-R9)M7oNuY#=0>>?cYg;3iP~hf0%`6#5sAHNl4Tw7^6x%`xGq>7cMC|dXmIe z6Z$JvKYdvoY>Mh97Q^Q&Nj#Y0++|;`H!J;6=4HWA8o`o@;-4ym45;oWU~(SD;QnHg z{N~b)m-&>bb}DwM7dx-kt@4e)L;7iBVuYWr>NRI)U{;F0q z5IJhg)$m!^{Ksd{#sp!nOq&60)Xz}s6QqX&r>^wf8YW%Q<1~r4=u9+QW}Nb0S4C1@ zo?Ri21iZ?vI*FX~xqRrBcrg*MBo<%GB&t??z)dQY(2_`s+4%jF2Bm+vy*|QoRk43U zgGds2@TFa8wZ7J=Yrc1>9Ax0L)updZ8VpZF+X~RKh}=lGljH{Y2k?z*8HHv?ky%)A z*}1ZpgkW<_F0~)}2a`r=zD6FEb#k&%^ZySyBxM=99b%~}eG?gF8C#s68#?*c0*33m z9uVO%qgOK^$Q`*v5AL<3y6jB$>r`JRV=7^<5n(3B2-mQ*BpkprT@)6~ zHd&x1U}`0Jm8|_pFqkb;Tl6+9_b6V!_a_{H2hO_@dOdQ2)`sohu+c7QKHX{P&4*R|b2>>DQ#Gp>6GRPE6~7QG7Rfb5A#t%}ODp>5RtqUCqa? zQ{>0MHY$XJm=NJ0PLrMSeE}MAM>vq_vfzLO>&D!5mKKGYx<`p*@a(+ykJ1D+9ruEp zFZ1{FJ9UC|Cz7ErTH2apy6z0zwfjyNhw4>B8?=R80_N!C-q+uuNXX|CqWX+ zW_iJ4;3ZxQ%WmeKQTaiO(maA91QCMk+xyr1vTaIj8F?lnq+pRl-< zym~?CHzH}K1*7DhyhY9(x;>2wvW8c=nOA%3vu(fU%bh^C=*w`xv?e0=!OD=zIRRl2 zD_ZT% zr}Ot>-m1(DxR}l9gzae`=*!}PaG+g3w`wA;DC{A?uA1%MC;@E5#k}9}VIP5c ztz+})jl)XRpbi5s5_8yI4ZGQ?NF;93%$5n(8Z(;cq*0MZTUC1Ou_u5!pZ}B|qc? 
zyQAWJGR1sI=E1LJKz;|8WTnTn(XxW$@7md_ZoR=cT>$6UQc3bgn^kc+k)AeTh{H!r zeon&jv)9aIl3bM3AuTDYzDvqEO2(%1Iu%zQ&`3R?z3&z6m}^l%Rr91PyO)h9MR2ra z&8+Xf)vEKgV`ub6CWw6C^x=2X^c^ri(|4LuYc(d*KUMandlluz1x>Kq7n-gLe1SlH z3QMyvqM0#*zAl{YOo|FPK(FZ@5%m>3BWV{v_yUUJ4%6_sc#v=2U%Ml#(pjjs-Gqq>+AdFGG|(X)&#L~PoujeGrq_kblB;y$8^qr zd%pnc)Xd{TNvJe_O}aiK#5^)}?m@iV)uee6aP_H0Y|H`OoIW1J2EM1C9T4i_3OVOk z3l8leW+C{6awznniXvODj^$@dSK%M0fh}Vz z`$}=(nOG|vV4aw=m=5f=y}iC~fLU*v-h?VZCVYE1)4c3IyV~%46Q3+Gpnnn>1k`(#_M&$Z0(d?s;p@ zrl1OZd8RaHQp1w;jL}Hsxc+vivIDlNF<@wk#n5i;SS52;{&VuZ(Df|$_uuAHv=Um% ztE5oTc+V1YuD}YHG%q6Z;QD*78lhyOoU*2O>Yu_{ewUh(Zg}^+86u&4+R}YIvcp-Y zsL$BQr&y`q&6K>!Y)v&IhU2@tyQ@Ke8j`?S%0JN%yKozY<2%g!jp2iQWKs5%?t2UcT)bMY>71rBmCkp!f@j;E+6KI}ep{S9O zOsbx-yjzkD+OaNJaP0Gv%#b^9hT?_yOkWYg>X?pQPQJ{xp6~tQdi7&OpX2wbEXsK8 zfDQKoy%|jtg`_vmBbHYvr3HW1NrsNRYi<{o&=~ex?I7aH1}OrTetgy#*7@VZm80E3 zi;fjTdE4OJxdmQn`_GI~`2%#3RS9HI)wgK4*SK*O{Q`Us<+E?FFDUda4+|>6XUF7Q za%CHhGv*_YJU+<66v&RPIF(-Na;u2d^ zqZetqvfqIAK$qa~b)G&diII*|L4|S$0b+0Ju0rdo}>bjbSy0c%;j=`OsZ{Oa=^KvCe<$_Vz&M;ld zY-papeIz$Dzp{CC*v%o9plgtp<`Bs2)L-Y0g|%&2s&D*sFCw?!#=XdDT)9Tp)!q>& zgYNNxlcP7!bp`wZl={; z0>U{z?U-crYV;DbuZ%X+zE*BaJYG+U8j4J7Pz?x*)({lJ(WXlgimII_^0&|9QK5So z3|vh$duRR`=QsVWHk;FL0M+pOg-3)KQfNvWMgYR@R8xx?fM{Es zC$nmGIWoN=&4ILw(^;1p;!WrBo>7-o*^Z@idQjaB)2~>fYJzT@g!Ll;hIHhysso7^ zR$x8qj3VV^w*e3TAoE{tmc4F{o0}g%!!k%-+o|y?*vso0BD{38qE6GC_;^RskD)qN7{%EO_Ue$Su(KqL?9Q7_Zx zRs3EE?ENCplhM zSq2SNm-4Cl!D|8N&K+*pW#5r~oi2&e#KinVVs71QqMuecdJLJbHG_gcWoYGsE9Cnw z9GpJ9bM|!$Wt*Gb`Bq!eT&$SSNifpB7b0mmb)Qa^lsqk^#*H_F+2duNxCFSOY69h2G_*DKtb4pK*`zi+Foo| zN5WE#9`aHJJ$Wj#upsG|iot+X(ejS1FGNIJaWIF<34tj4_f_KpEL-CjVXhi!+GEED zFC1xOZQbMNugz?Ye)n=d*%Cyyrim~jOY2ctoovmzmo|$>{JKg5as2x*BRSJ1o?9-q z;wqk!WC|MkO

0_5;guH07J!l|_BUmIG#90W#78VH$=U2 z`f-z9xV|dym7mO>LR_r(E)CI`VclV1w=sPMq=Qq1;vLZJXOu4=Q0LfFPOqvsBYx@+ z6So$4IHrB530mB94E;KJM6czCy?@?^9;NKTiNl_|_!{T$bWohVZ43LGlz&l5HL1FL zex@}=O`1jj@i76|V{I-(JoYoemE&LiHk6eI=T%&-%`%CvX7l*6|WZW@~0a zQ4<+chDb5L(OtP08a?{q($dgvsj7`kd1}ZniR+?dszs8lYOrYhzMX&E;7}F1G}GSW zvyTfSP>~NY>|duNxQEP2KD5gHaXZGd4<{0d>${7;T5HXWt7|1Y zvF`xQe!1>m!VFa9_=IQE+|bgC0v-?x1BDjQB!0;YJxJjHtb3`5^7&D~Kc(hoy77kZ zO7+w|z*~qq>hr0p_xH_IP0)>O4t5HIg}Ax+%*3>D^>P$fHRDZ(_m8Vkc|6b#U$`a< zRLyR1`L|1z@^g84c27{dSjhC1{(DfaM-0v7&^XMfmhK_5g`=QIBs9!kCmPZgPJ37M z=sIWY+uc;3-_UYXRJ3q+n`Ac&+b)iMYEm8zSZF|Hj^X2Su@V>*9k5C?E)u zvt%SGIiny+L?q{&83rV0MnOPw5|Au8gXGL8QL>~V=bSSPFpRhNKHojt{ndBQJ@?+a zRp<9B|6q#l>h5{_o$j^PyVmnO>m0Ww>1hoj5F@HEti4wm6;*~PaLTRN=*?btNZ#mz zU$NV=E>#~J+vBHZ+e^~a)E!#M4fAn_``xO2KcxdhG78{+>5y6*==b<~R-WsMk6?L~ zS;X8|9y-BOu9Pju3X7cuK`Ck$B@rz8hFC}W!9G&y;@5gx6!Ep0t3N?zw@;p{mZS)J zZrHo6h^>gMLKv)SGb8~o2^y4VTJkrP1^Rc*t!c+TEN5lo16H(A9 z?IY;EUpCVE*a}FWvBA6nDW;}WC;j!UBA^z#>r>3R@)YF28i7vMe} zvD)ykx_Ay7m_M+e|{<_?QEbRC`g>ejTh4uea45qGxj|R*3*v|Rvo}FZ z@wzhD)NeYaG3<~z0Rf?7Cs?Ige~y*)-$smEv5v31VHFLvSvdI#;{3A=5Ao#vBke~V zArR*`G2wKFCsv;P)!M3^%FgyZ{`#QH0C?16FCgAA2h95rKGGA~6}6iJsfJAn{sf%>MvKPiDEk_a#zc8QSNc$V`sn@H{TI2Y9@tM%IxXV(Cn$pZ zT;l?H2pG}{q;);WR)5TSX2`0#KSj`Gm3B=&s`1@}0L7P>sjO($zrZ?1fD``Gkf{bI zM-A1fewpjTwFtA*kV0c&DT;%q{UWg-$IiOBS>`;{4Di@5VSmyjL&dI#03JcgpFGVE z+3|sck)a!Z-}kq}g|+p6U*aEA_j@k>dW?S0#qYWJhYk5{FaENpzvtrjT>O8?rxr$x ze4+5}hj#x%)k${rFNG(n5-$7ilSS!nTnsd2SUcKP`iB3m{-jn{-voKp&%ayJ>UAsE z>>Fh;6(@;_b5mC6;8@Lfz7{!!%6LW_SGM^(I-o}-6=FGcV)rW2lUAcjcvsdEG_D zH_p{0l}aW{OxZrxw$-)ZQUOU?4@(CEh0k)!;!J0gOW@!U_!Wao1v{|~u3jAm?j6qX zTX(Yiv%NysOIn!`fV3=}yHrdob56&)0X5nXtP;3V@s;s%my%%dmY?05dlqv@heM_! 
zgs#SsWtcN~8C+L6QdWA#N<*1MB(N6+!w*uS?WsR-(KjEhnzt9oFZnIO_W< zA{l&^W`Q)yoSn43xNB3O;7~a8r6wuEI8wU#w$H|zDL&Rnby+JD!uiGU!C9J~*667c zHr#rdqBgTYPtN9Xtk4{_`N(T@Ua_i)m}LEINOrl%G3Tf zn{=C?9(O%0y$z;j8p@i(i_@uGDQboqrp+~PX9ml4vD}{q5{=TxV*2q)3>)eY%g|)6 zF_rX3D)3JLpZ?zkpwVy>YY!{5(iu(0b-wZ~?6E6fUIn92_(!D~f0Xw4Xx4R{0i6Jf z>LSn3ie9LVE_7&2^5PI0K|8IPadL)#gQQ(CFd%C1?`FHnwE@6DuK|6GZz#TBj&yZV z=_e6Z{_E1&*DppV8_j1j5!%i-3jk!m0)FMu8Z`neR_n?M@| zr;NWwB6l92kGzov28@N6(2@bVIDGuu36j;Nk{jE!*9gh3#zxk49IE_ZP8k062KdK^ zsskWV>V7=GAa#~Dv0-c@x=a0|E~cHsxa{OUj|ocsULZhxz=rCL_z0TZ zdp~WMS!h$7uY9j&*xOq)WN#x~E8$b7`XXyK0-B*2xYP%GKFh*0FDUBy6oO z6FFN$2z>UW{O{dE0+{y?$}8wvwF#-cZ-KajaBqb3f6qN zaaNJUUg^j}V6u^yp_lIY52gdT_)C;1%Av1!BhJSj;k+F6K~;C!M(}TKa! zVoRd(HCCZ~!JyMq%$fXoE>o06tT3bJV+Ib-#h$O!5>1ORVy_0YO zU))C(E5)zWi6&qcrNIo;qp?--gF;n-7$+<`vQK)#p4BhoXg zYNVOll_PF?3!a=Ze>p+;!eIYJj{|wFB^D>40;9#JnCg8Sam>)%ym@HU+0{Phct+x4 z2@HRt34okd|h2KXmda1L-~e=O)F(e&~oO;Q-l{dUlNr?TdJCwfg`7 z=ien*{)>_;q3paJ6R))q4w?;(F($P^BQ{Jih&%7@n)<%Qe9O044jOHSEGfx2L&~>C z@G_2$Kcn&*<#!1^66AY(C~=F~zB5e1i=KG%(s*(%d(9EA^4N&Nf(n(h&n|6mV)wbN zuW2S^JgB=OJukr`5UP{o@oQpa+xbw%DTMfhYMKpou%&ZiZ6)beY7%#C^iE>xT%sha z^?Ay3$+;q4YvDbOGY#ZIFp|LA9hpDHr`3^2L%RbsP%A_}$h?%iFUEK6x1Vpf@Ud9B zt&{!4kM>aJEEt(T{ZU26bHn)G0)9*IJmY9HFPQ2oVhm7rc+`<@9-74Q)v-Pj)5#aa z4BTUxIpTca^!lDGj$&~O&n7$nDUF^C$Z}c`c`0@%6~xf`=nC-x|Ngy4V4Nnm7z%m& zjJ_u?NhxdhoZ2aa=QW1xhF39LhzD6B?aW8__hY9IQ@nF4Ne=LOEaLr%H8dH>=V9LV z!h?^5#k_56-}KOAKpov5eiVNXrp>tj;T@Y@y*?X5Hq}0*bIemuYH{MeS&uD~)=^U> z%GdQ(W9E^f5Gj(n$2~3X``8wQ#;lCUG@`Nj$4Bs_MHuFSNAgC0%P54vptZeL_KiME zUjpdZ*PB9MR!c99n0Rp3W9Mbnk?Tb8zT%9&?pA2Uq|9Vf z?ny6W1)io*bDdvzB+I0w7xgy!c6K6Fx;^^QRB%W--uvH~NnOv4>uRM59p=g~93>oI zOU6hlXSiejUN{NezH61>ySA$lXH`UA%E-kgQ(4zI@!%v<_AoEstY}^bks;euHKJf3 zwR}&CC9?lrEVJ=sRHUDsCUe!%%vUc)3DR^UqAgxskA$O=>X{~Q1PysAJBji2p*ry$ z`Jw*J7Hzd1hn?43P@{He{Q*!GdHVkbE}=8s9m1D^?@2q|n46Gv9yEnN!$rvbq=nRx zTr3i+RNaiNRlB&-g7urT!W0Fcrd*hq5_oX|&uB-hVt}(W)z2=(9?gjd3lz;eR4Tzu 
zWIejw{US=MB2JyM%Dj%%^3w03CvDb!C3IM&kkwm;1P%)FzhgrxOeV}oKEyJ}NyRkhwsU!TnTZZHmSf#p5=CTOv#6gw3*+<|=aE`JASnN1;o zwV%sT&8$h*OE!)bVU^%l$`^@ano&4<@yhX2$ZNM5SD1@jU4H-KTG0)Z*D|!rfgDJT zFyB+Dnd5iqnUy2nY7UoXc<{F9a{AS?MVfIIBvbOk*uf_yHYkhb;9BJxW-GN+BJ#_3 z-?lcvl4e?$v~x2~H#syTvZcfUKS6gdf@G7JbAf+o;%+jiTBYZ18oAlbQA@s)_x4i9 z0loj+f#TM9np`X9f|$zz)2hrgvy~0Dv~QkND^#J>6czK_`@2k&C>Y4I8O07bW6q%aXy>bD*8OMNgM*@Oe98Q@Vq?F;-L%2O@s{cfObl_9 zZ~yVg~LzQ$L-EcW!YfpxlMLm8iYjwmuvq%76{p#?!0LOUE=v6S*fLt znjM8!ks~X-&+Cz~3v&x;4d(+OI~9f0W8r@RX^&8;ksqggjiGZ52SdAeY0t)?X*SC9 zReC5%W1#UwF>(_<^FKg6|4%eDdg|Yu&eY#G|KWp*r?aARU$hi{@cl&~j>3=@P!2N% zQZhQwQ!~v7tAB#7#F+f4`Xkp(EpPo3gyBbL^r`s>ytF6fOdzySJ1skdSZ?xKMRD1D z0UKfa*yPV~?SNZI&hKjZ^=CibZg;o8yHWG9l|)~H(ND<+O6-Dv|4As^nsYrGe{Ob} zjdyHuC10gG*ITeHBWil0-Mb)@rz}6KVfI{DR92ap$5ZVx;Ps|tRrg!nZT_fsk~j!@ zzg44mpxV3R#ahY?eP8wwJ|F}{sJ)EVp4m(`9k36~ey`h|R=>J}9<^q~Q_-7ppr99 zCKUTvWVo}m?NMIKrFLTC)MNKWIZVa8s>* zA-jF`AWlmRo*j{y&71piB)nW)C1nMFI?O1ly)ke-Vkw%-#9+9Sk^6};{=o1@8lX%) zQ8I+-2cN)$Pes!a;~+&U}C9p>|oIl zg)^mbw8sUy@Jfw^WEX@>RL^yyL=)WkC{~)vhTh&(D*hm7y2F<;{Val`{ejDYv+g7Z zQ>wnIxQ~zQVeVRU?9{g7LJ^+dU~cfsvn~dP{TM!6F!+>uWvBW6mT?y^$%}F9JoA$T^?F${9#^S=aWtdb%?Xk6UpyH54%EsBtmg;_d<)0k{C% zeV(==0(-`ATeWN5!YGLAL_^|q zMcVsa=kSR=U$45?(oFpcizf81Ul!zM8=El_-;NYM9jrQhRhZcham=g9sq4@$?a<;? 
zSc?)L4#RrX-zk80Vg_Z5FI!cjd^voVsJDo$JEoqE@s?cXH)MoFB2usyPP33Sur*JT z(5*2-ou^~x(jvP1<<_h7k9)YQX6S2Au_Qaj1CDr?05U~XE8aHhf&6Kc_YC4Tvl!1b z+B(2)&RHxFk78~bs@1#15zL^C?O=`bm{t%%m&i%=wY62fNPvxZg6%x;Ltqvy{H~Js zC4q)M_y}d?F_#oYM~Im<=LoofTju)%Mr*W)aZLeXmfMHkn_Ey!K+j~Q51({LWwTZg zQl3}ro@aAGs4q?AxK%a%P=SVsr`J&3wDUg3Ys?q^^D{4#t@oBaEnGtxDU)X35_!ei zbk9|y>KY6dpvP;-sPqGvRg3d(BcszZlXAUw8{fS7t*B+n8)n+7A-7RBt-6Tt0Fyhz zA{atsCUwbLG-xejHOirJlQzj84sR0D3Ya4xj#hk5N(K5ew;Lblgwn`hqSZdVD`N4jiu^m-TAg-~~4V+o@r*DYz;bAPSFIzR1eMT24>VM}d zR3o0zO2gGsIta8{_*FQQGS2mVv}Dja?OPGPBE7Ee#JzvG;0+>-77p0s4vSBLA?>0HM!6SaNH@$Ni)Twm*hg+VkVuA(d9=nc34*>Q9QK;z zEtky_v&Lp7>5m!)2XEyEXD@bM>menZ1Jp=O_Ryhia?D?J6zw0^G|AM7`T_cNTAKD_U8Ug6kf zkyT)FBkT$%l)0QhyTGG-m+pAx5abm&=7s#CXvt+?b8TQdEsS$J-dG+Ks!Gb95YmX* z^=u=?FiwW(1Xo5>SC^f!(Z;H-M_=RdN85#D(SwcsdX!Q+Zy`ca`!H=)f;Dz}O?LZgX$0r8NKfneO)eZ=?D4 zS~VFmHP_tYhUcS}`+$7(8uilu>H&Hg|1w4-vgA(wutIS$?$^dcJi+5qKtNi6%*c;Iv1dzTVt|c-^P35uf9OlzJxEyS%8vLsN7I^9!`MGPxKM z&EpjL93>MeKIKG(xCPENbdicy3_j62Rjrv|pY#?j9lYE8u*7JRLo1^NPUQp$x5=TS0xPw5FTagFIM>72?! 
zTd*|l8LScAK+9kgi%k2w^sB76?gGL>T{hNP@BUfa_|7<`IuJaAG%{K&>yUom{t)50 zgnzvGD+l>jgyLi&e1TYUNqtL1Y2HVY^gQrj5;eT+>wHb$%RQ&StP5o}`gG|nwYQpH zv#HU8xNf`J_#jzf-z-{Ub0v_UXHe6tSY%UaB(?pc=K)&;OW?bXBrK?XP#o-8IS;#0 zXfpof^TfKtZdZzj^@`6pJD#RER40TgbNWpBeCc>W#5OH=FP``mzy})Qk8|+rO}etb zuN^f}3^81J)$o4cK`cY;teEj^_Lu31J8xFs=ES0HG|L<&>W!7x;1ep9>dqq)4CoE$ za?8>oiwWbUgPR(LZj%GfA#W$A@iwIOe@OZ6F&+=n2#@WAg`EEc1`uB`pt()Q?&$DwpI#+bCLFT?bTpYx!!i;#Ef}!ee963 zK*B(ZLsgEWtVK*T23m-flXe~5{o*QsKWSi{>{gXOXVmU)<>~F}3|XBa%kdYU%Q1K3 zYLmq~2tvc0%eH@JntxbBy_a8p_7j9ZpA9av2Mhu15I2o-g*mv;#*Y-x`hs)de|Q+F z2$g)^s%>iZP)dpSDIuYDpb=jlvQR&WJie7<^Mdp7Xm)d>qOTOr-7&{hHc7ectj*c^*@l4-K87)fI8a*2sp#FErz^eb>AD<+Uc z(cOuyrB0<^%p)b`hFQfQab|=7vvhwu!D(}Pd{yvXo^`D|3CU&%Z|`bmW3=Zn>E(L3 zT=QEUg;J)zp}YyCaN-sefoa39taYGwRCk&FzE$!Ez_7tf>15v4K4o3;^c!U=`92uh z>CR|@UT-R)ROIuuSM7}jOX?^}*xEhWu)K&rvYvzQ<{95j?G;|8`22cZq+O47!n(&( zLbHvhLTVYGG=pju4b(b}@CO45!NarL6-vaNwIv*GXp9d^;6oe_4_EmdFH>G%wL=K} zZyfk%JF>)VBK<8V0iGGp`S)QWTRgik>oDGZ+xQxCGNzeP1$4%f|^x zNQGbe?wrD8PNwuBy{UB*D6F$fYB4BzwT+lYiS?FE@vKa~jldObrsULn=2IC%qGyK^ z6^6N}hWwrWex0}E5fzSpTnJl50~+hqv2}>9X3#vGn&fQ%b@qE_zFQ9%u}hv>#jB`) z=^-cXoA<#V!w-skUU5WRA`skuuFJ9GnoFLrRH|P|s*!}|FXL&JKxHw0Nd4G3!{l0g z-Wo;wgD;I+{SXyJ9F)z3tgaGMZY)g8~+NfTDrkr4BB3KV|(GbTMMXj zqjZ0Q+BY-Wk{9awkf$P05+A3-iy#-+1ARG~+yebgx5`<*$6H!KqOqww-+0=31mF+# zJB<2PTj%nr2jdDmOKsc|4*~Il*^bIA1HB#xR$LzRZu;VEuVPk)dZ2Fr^-a>QkEri5 ztg1Y4WC0MYs*6&oJM4I~qmq)yG4FcZ7c#8)7-xcFnoOTpSJgB-8;&X)Pl zF^`7COYYrP6{6*3wA)uv*vy@bQiJFEh#UZK1R(1wxVCYncg8s^L-gJ`9av;Wc`ILS zLgUSJZ-(_G{=Tg%XR4F^M{ZhO<+M<%<_Z2UxrYA|t2zS_Y@3Jz9r0IxCR-on$~K;R zY-$Wmb0H2`**HexEDQ8dy3E8y^FYV>t<)A1Gjk;)CGtBd&^xKurlpB0x)>j7Ji|f{ zPxrtY3m?rNpdxD zx&*VE{l0e@wxhusv4@ul3Et*abxf*qG?wjgu}Rk*=2v)5-df)j4U~VxR*ZP~W?L^S zUKi@VdKPH%bgEmbG!^L{P0uZ(mk&CCR0@1+~3Q3 zzXh|TnuEaoXNqZ}ZC>`-rM8K6q#_C*#LjEK$eoALGftP2d~0~e)$5!tVV~DhIe*DA zn#&VyPECq%ck6=Uf!8G?3|SA?)=s^QKJ0QaCi*=Dd$tZeD!Zd@E=3}Pm3f;+#I!Eo 
zEfRZJf6K_ZlRvD~bddT5OM(e*r2_6bC)qthV`81V7y*@N>A`MZi`dtr-y|rFozK*(YJN-U{c3L*Y^o~aK+mXI5EXE~q&$;yFvKBejCqLj3#4nT#H=g;NAM1U9 z%<3zSI#qcleG~eM#D9^U-&r6>o;aXWOWowj!cuWsP~u{pZOxTAE}@@bL;Nw({u#&9 zZ!0q+nNgilMT(A0;qp>xAudL2TQKyvL9dler01&?J?X-cGmMDls)qH~&nx0-S+C%E zVYN_lYmP5iQF}k`57FEa9orB_#h*YsxkHkNf|nGpZHT(4rMHFXn5my(lNO%Q(X@aa zmZG{E>|5DA6km+zMB+5f8Qvi#pNQFDRZe8VXtGM)&LtfpU3Z!Y%w$>h3j~6+i{OoK ze`diBLVIc&M_ligTD4SEC#7reW!j{XW!|NWcxE}RK=xXGF7=K4W@$O|zTEpHktboz{mN2{P*hE|1ql>GWwpjzYR_VO$AAOc? z6_$55l5$n0${5~+rCx1J6lBdwmLA;r1pC3@a8#>RqMTKUv;*_!!a``9v}2}JXg`Z} zbBE;#FU?O7hk}0&o3R3m&2?9b8Fjx1aq_~#PTkeBrs!U{n@D`kh);lN^!^1$N*(pL zidd|<+ggRg%EeIl1?d?=dpWPF;U_52*#>l-SXBC?wj6x1t)w4X}1r6rNS#yK5g<$3H<&-n?n?Ehf;M4rDub&dgn@xTfB{^q@(eg3?-bBH+Z~~BNT?xMa z1X&qv)e7nc?HOLJdpO*NAGivW&HH@$kiYuS1M@Li0w|%Nc*NN|AF}FdBSmG*d9LTSd&<_DRGK3tX~hE@thh|+RIiq zafO0CJrg?@m%5Ps(L>((IRsPUh?Li|04+*B_;&VCXns98E18uG#;a;Sd*ZdyH z0i&UG3aVywkyun(at?gSwC~q#7MsWJ6~?QwXrVZ8rJ-Io zFNIJYdz})goZG=lIj9|O9Gu@(jz0`Eg)`NTm8q0jfNPX!*2vAR$6uFll0_G2Ox3gX zn7$J35D-+fP38)A&s)&$zF!yLp=)>I|1Cg{G=+lC`hH*!XV;xYXTuN~YBf|fUV`vF z8h6-HO~(<=b&AJgf?&$)3b2WN=cwJAD%o^X0XQQdyyWUx@OHNJqLHs%gwTyzG#;h!Zlh_t?c8p zq6Vn6`voUh8zn|w!?c?un=g4$Jc+=#EL47i$Q4o0%Hkan_AMyf=;|H7iCTmqScag* zuro@dpCC-=SBaNDLE%8jNODRY+NBDBA$N0b@1EfnDhcH_zRkW-fmkRXKX2_NI<5KK zpjBEkcJW0>vS8%ga72T@W&JWE_w+~hAIA>=;J}iGK0fS2WH8-e+k>(`iJQ4ZQF<<*@Ee}kA{9J zYVEx6J4;?Ty2b)J)@(ebzV}N>>R-u)0U!&YBiQ^Xs*!e08xC{P9Qvp2n8^g|?vY%> zaU$PG8hbCLH-9NO{xi)^6nJ`n(FzLtGkLi#!TORDMd4AaDNPq8-((t^8)ix>1aboB zmzgbxI35mlb_w9HoXf1k?cZK<`-4R?&`Bo$uWD8p28(v|$cd3sQz zGsNc=9-hJ^|B1d$SZMk)s9&0JuA<8;azU}CaTsEkbtlg=3lFo4&Xp>i2a9Yg)vf$( zV$*)LUof?+;GT=cs77Y`Zey3?_mPZb)mK`nCiihc+4(0y)b1~9+#VQkp$+z#^U8|@ z+0?D)wE$(&;+^aDbtmQOL?vJ@>-E6_+>%FhqjGOI&UrE0$Uo2;A^>dQagV3vD%ZaM1jrs z*%fME8N^(d8`%a`6rP{#aM?!GR2(Ko)?kgDS?7?zrk2S@9?-EKVSkInb!mj4nCv$s5d@AMma+w5KtM z>Vj3ht!ck6vq2_Pv(b9LP&ay0G%RydSYP1>&rh?NRwogt@2jobo8G#u@4W)EA3xf7 z-wY}6z*pz(cIz5DVifb)*>0frJlHX?4?g1IRP3Axn(kfEYZ+6~b^o!^ka 
zm2Bowz*mv=hJwN|Zn0;1oN3KY0c>gZ(IoaTwzjyhJ!ap_*t@m5)yG%sN#2wvanxTw zrqNdkc>CF`0;{~#6;_2648K?@_m@^)yNT1QY&=fBzulqFEb#8a@51nHv|rkr(9n=h zO?&+bNps+}x{6zF`{T`S7$OCeP*AT2x)&PGY@HXoZCf?Qn?8L>Xl!qa;o|ZdD^)Op zC8awEhtqOeNq9VZX2AKu9oTSx@#6?nJO)v@!BOMYB}adD>*OligQdP6q_QJx)a%%} z4IR7H+QyHa(J|j+K)tWj&A`WB!{0@{&Px8FtQLFZ9+q{8OoSgBb=$ZyMaOorbX}Tb zT5z%d;dmJ3g((T@t9pG*XO6<{uYRI-Si^!MtGG^L(JP-BAy#9P{`ZJrrM_~=aEy)A3^tjlyMOco~DPW>pg6Cl3U zxbYKwFm^0X+D$PIk1Lut*b#B@An-hDcZ(a!b%~xJntpZYBZh~3{yTi zA*1{zMOP%%Nq;X&-H0RcCrIvt1Lorb$L-H}>=juF3E;*GE%@e1Mxf(!?i!YgD0w{s z+$TL;A)?9{9gD47te1<(Ht(};)WcUw-8dEkR<=ePbjOD|E~V7 zIsNf7Q#_AbEw@6MiU^0(-E%y;*sb@=utMVuhL^yWDQYn$chN<8njws~1t3biJF`ud z-M3Bm${IY&^>tH}jCSF67qOQ;gaq_59444LeoMF4f;EJ(vhn6a(Mty0X(fvxDT+ zL)Yk~)Tc3Q3lw?SwWAL&qsY#B^NY4#?+92?QLfgJAuO zj7NI4aryiQVUm?DmwC-bdjknu8)R$jgqF{rAbS$N8>Lukq4Upqxa}`rKO5cW>f*|M zN25IV31y1;YP9zTpCMO!^c2BYEK~}K&Uq3J>eq=9X8IJX6Vk&0v?)~>(8=2?eGi0X zbbtDbUvPZ?UAGB}1PUp#Z9&D7UIPw@n*cyg@1&IN7nljig-^xM`>3xw0d@OOR361T z+Dz?Ps4thV3l_=wk|R(l%#?F;H2Ffd^0DS(JbDZBE?*fS0JGtTgQ#;@9>UlXnQNRe z;)I6@tD^;#Ek_cRp6wi^_ zx!dZIuOVL}aJ~ylTw=eA&d(Lht~+ZoN_Jxs%>jfvfi)BVs@ERz8uyW2=XZt?*b?^W zOlQV4zUg^bIqjE6VGh+LZL2e$fiHYM9iwBdQ2Kd6^1C8b9k91W<`#lE`RY@YYR67L z?bispH7X2_g__)Jj-cv1FNkEyaCF61r2Y)7M~)YqVjUc$;jHDPfOm%xA!c+BbIy~Q zYEyCywYRM_{P)+FaZ)_@)+5EbiO%JB(SX)L^o;U~tdGz(k)$gF;Ew6smY|HhnFSkN zAA!T|tJd6tOD)r7MeU7SLJYN2+8kua=Ka;9--Z{zmKFC&|flpgZS)u*a9PT0^AQ$#{Ou zc+jj2wspDAz}_lczN{f0-Qz=?<%7wey`A-pa)AVVCdf&;g(>GgZnw;0Ol2D6Rp3I-Q-i@BGQ{TbXl zZZLjC_Sk0mlPW|*(e`u+;TDP_$N|^4XmEN(HAVG!Uwuv*y`#sT{^3_aY+4>JEcRw( z_Gr3C6PL6d*{L;GNzt*au zOl^5YECU;99;3J_Kh3$RLPk3R6}y^rp3q>MLvB1NEF ztK@TXguY^#UFzXA+J1=I>5`yrus*#Wk?`(w6Mb%eA&#}kejXdTlbg_$&^&Po{U)?+ z`3f>EdI^k7@*WarrPSyflz2i5zPG>LRQq1WIbV^PnEE53tn=XL6}&E@StbXOsC})O zXdw_q2{EeT1M0UO5|KE)3zu-CeWp`w%n#Va@$um#5ITB>*qMJ+v z-;(VO-lao}g;9B} z&{~^pGCZC!7P2jggA^_bTWhGUw?44T{*uhtS}))4nA+7FMu+n#)zsqFvv)y@X2n_Y z@eN*3#p56DJ2wu{AtSZK!-aNbd=FJtQswG;o_0s3iRp6g{HBHktEfJ!1e>&+kCz@# 
z4N|a+FmJuZIS5Lp9&;f(^|Z@XB-=Bo`tZ!1kEfH0gMu?QJ6Ki|6s(DRXAoSFl``Q7 zsrbGH#rEOP`j#tW0d@tATBKVuqJ|rILm#mc?oiBl60o3 ziPEgrsxeMuMGHLx)on^o6{0Lp_zqpDX;BAe40#Zi7_O_jT5BEpM?hK!;jDY~((qwZ zO{4D%fuNVnnU2&=3J)+tx+xPH0;dS?I1uke|$)W+`X}jlf-H+}S91Y-< z#C*{&Q%R(YcDg%&ei+aO$=$lL8D$jEe4_s~hO*};$R`mL_U?`Q`bs~Gv**l(1l3y5 z_HIFTwv4kl^|??Aen2|usEg<6q{&#n+N_K#)7bR*Ze0Y2xhLd2@zX#w$Ov}fXVlcG zu&n&vkMu}&2IpF_T3}CJf(7oC5g}e$M+!lxyvUFtZyU?G2GNuT35BZgWE*J zo1Jqs*@&EHpSc`$^%t*8F%aBGYYV*JB@m9E7}GP@Nu<_9KIk!o65o^D$wX;HfG=e1 z5P#d$cVkL^@<#>hUl1)=M$Y6pAYw_)<1yyZ$S+N}q>N5zyS58RMiS^eApC;{$XhLg z1wzwRW=DWDjQ`N}D0ERQ1F1xN7_IXYyfFno2g6Ry=K}l zxU{AA6J%-tXdJFeBJiMy#wvjDdWp$>;|gRHW%;e)?iOUQ$wcVMB|Zv01H~1!s|dS9 z1_SZX5CHSjQ&(~0CA}s*KtUe@(T(#@Q0MSXh&xc=iw3H5!a&5dV;jo#wG-6=)af$5 zLl3Ebf{t9Ezkc6zR@j#(&w^c$h;vwC@m_1OM=bT?}1U^m25;QwTRFj z2SeP;aehi5vr)^LzVe+=uk&1(x!#0=Tse`cu`Bku9)nX^Ld_BS>#=HIukDGB>`n>3 z3OMJ{;jWSu`1}D)Rv7k5-?h|%hVp#xZp6C%Q{s>Kxz@!^jdAlANQWCg5|Y%~ItQgv z#8o<__ffI(AuLdJ`=Xm*ZooTdLCHpZuKA4ub0($vbT{Z}n|7ET$u7m~2ly@V$yHnu z%-cgN@QQ%$7gwtya}lL={@Hakn#~mn>}t-ULDHfnV5Sa#?J0tn7J6=X$}?@#BICe$ z1Kym~hWd$BH+jzWX~%Zzb3H3-GGtVaXIY%GJ_AN50G$~>k=r7}sgZb=Wt6XIJKvpx zG)I^%4v~Wm30sdy-@AzW9b7ALzPw6Ey4N2>%j|w!x%~=gI<=MVn0(BA4+(}lhzYNd z(uj}n?J>{U*>m#a>{-3iIjvt!f-4Hf?LL{FnS1mI?RJ)L*;AXxz3n>`?EI9>Kx_sS z)403gSHeb`HI+ur&<%%}6L|2-%$AsQc6DP=W){L*W0WsxjxDKHLmZl`eda4P@p~*w8l+6gmPD0Qm_D z&6}mopW@mPi703_=nHI!A0&q@7IEAEpo9fRfpNQWcy|`KQLz)s0A3D*EPY!WSsskT z!Q(r{FacoaM|Z~$-Oszv^MWx_Q7^wpb1ilA?;2sBjyGOEki!Mvs$-9QB?~~Zgr-l; z{9p`|xk1^2tG;9lhbk#OOxmR=@(}%i=;`xWf*F)}@dOGRHF5#O)(mO+haNwsS&?m$ z)u6YcTO+runP{~F3n4u}xNoyZ$ZzU_6$|b`vG_a;l_Zk!w!vhu{%WDp z=~Y?Iua=m((feezarUZC`=%y;7)fhV-m?geJ(6bREQahHtU6z6JuCpQcQW=RrE}Q` z@W#txSf9xL54nlmjY}=gm2!;Q;==_g|JMsvk#o*n0?6#-%ae|I%3$d++?U4&g-?lt zVWlRI#`ixmAqcoMD#fK=1GTMQMKe#1dxs8=;VbS)M&L)C5dqjs?=tFE7brPgML#ry z8s#QeG&Ov|1B6e=XELhFB$_9_84<&*KJj@|UiY~__*ktMlJx8r3j>pqFYProf(VR32N9UY;`pKa~KRN=o#M+ITnutPbA2C`fHdc77cL;5Vnm?x4&6rd{_q=uuwV?cBa-aBP%#Qlgu^>^b-Q`)=7u3Z>Y!Dq~{Ju 
zMhfxt(sORXfosW(TZ(G{F|+odYcE@95bMP@cmQhY4XyMW0OSsgyTr=xZn0hJbg+*U zh5sAOjy(zyUux?mkz_q>T6>Y<5?N8aWQM6pH9&it!KzGFj+`jv%SgkdiawQ@cFib~ zoatt7Oe?}EdhS_t7_(V4a*=lxnTwVZXG?mpGS&bNfmM|`?Axgc+IJ=kP3{VG z*hQ#k6VRs(9Iu)bmg+@;Q6vDsTD<(m72uJMSeNs*j`J&VKc83(x?10d?lpN6)z`;s zj0oRG`||BghHnGTH@kPL1_qo9K_;Wi6dy%Kl*CJGO3O;ca`V;tBZMEO^M7ZAQSCi? zg+a}I;{q+hXn~c%R=s41H$-;8(?%akS)5&+%$utH=dJDi^*qgs2BYwn8io$2sPoZj zc~(-=g^m&mmz0B?5wYlb^U9{f`UchB^XRtBM%6s`zwT@MwB7wa+w3otor~PN@m*SV zSJxVpJRGH=Q&e(u&Ypb9x9Pyos2zqc@bxC+T0*LSk4HbyvH?7E zQPDud4*8^bZf8Q^%d9e)p5Aw8l~s(cqP7FmTb)~1)N;6kT z==UqU&`W0&sdB^_yWSA-)``stupLROTJCoJhYLxmVw(^N@=0}sA zFJl1gE5zrSr>6~GteCjTvM4Q{QmztFuu58)Ci+HB*3(eZJ+$Vk2lm>N2l|UbEUNJB z1yp<=0=7LJX}EWA<6We@uZ)ax&T4gOM9eKmC@Mq(Y`H3i5ix0+8XF56GpdSh5wodI z;RlM`pEQn*zu{2%>B$Fhj=|btWJsG5A2y2fc!;g);Jzp&ta?h(*E|0iajRtmP`q2Q zNt$JNB>SzoLE|ULGp!~$)@NQpvZV&0cSK!Vr-#A@_(ofsz63I!f?b^69i%h_hd|Wv z-bdOjpITaPl(?L^GQoPEK0&67z$*N`oxgfTFS3Dr!1)47DQ3hSStHIOHOg^zEgHtD zRBa4N7|v;`jG>R^*>f9UJt0rzB*KY)yo*c|fAYBimZ z5Cqps&>aH2wH!ggoq3#VRua8pt)!{vdTG4t)cpuejkq^g_Tt}& zgmDq81z%YvtwP-{)Ds5(V1H%wJII-gsmRFxD>&-^`93Y2BlKmAiT7tXdU|@=YbOjR zh6+{wJhH?7!YgM)Xmcw12OnzKxWvdDOdja<@V)s?BFIj^Z_&F-5D*YK9@-exlx;|< z4OunuWN#@EtSmW}Ehvswh<0VU-T={5qs^=6J08iFQm^`3bDD&b?-?f$mW)`wZu;`A z;+sS;e~Ms;!x>gplc$NI4Sfr{4S}oBbGo~C?3JnCc7kfe@j{pOkSYaw?Vm=*baa`L ztL@509)IL|)7fOzYC9OW+_WdHIy#Q}&dJSOAO5^(-p=%~V_ehiRJ^gh(!h}oBN-yq z|17=rOS;Pp@#&ztw${enpR)M9zs|!?9ZPy*?EcuXPxbxr2fksxJrQ6nlkL{_eb7b` z(cxVCIe~Wom)GEh-kJ_}6pwVQ6b%kl>O&S5<|@}7K-j^;F}t4KyF-8~T!KTe;O_1Yg;n0Q&*^>6 z-luQh+uiRScf7a9{!n8^O^JaU(lXaB236(TAQf~l6he9^tB`v?~C!= zg~cznUS};!o~Xot_IMt2qD{-*j-x{}?#FQ3cT6dz$C2F>75?rIEW|eQk5MP# z6)LqU`ogCbj4>(59-wUxSBkFEK1`MumiQ!JWe{>Y+d^n#+$wg{Bw$ewojI8<`P5dy zrYF-t)mMG~63{DE8kze5Y$=a(6$du#ig*y2zdNG;NQgG{O;U2?&m zeNol(FJ!HVkg$8h!*7vO$WYl?nqliJa0 zt2($EoHcyWeWoK!=KOS>Sb0BU3+7XCM*@|h{I9J{N7n^8J_Jb1`uHa$Hiq?V%AL-9 z6BpAc!J&ZvAPc!h)^a{!ctKDks z?Q2?Vb^gX=fD74KlVr)E8~L)&N5>TC_<4T2zL!F6btUHGZZW$uxz#+O@)+9>SDcMf 
zMov0BoWsMxV(Z*U{_Pp!vRvGxxfwYUA~LaGAYymKo=Cw5m15>%8ZUiatC8*MM`-wT zO3|cgT@6E+Fvzsv#*_7jDF8y@;Sscqz4@`F+K(=qATX~<>fB_7H%pXD1n#0(j$P!# z>FcDEqYpF1G*Xp0H5PF;eYSWNrTX88IR-+r2Vf^ZG$Va>%0pmPMbqsb?-$-mOX4hc zCN*WZ+nXHIb(4&IXM_v*np31X`rEK|Ah}@@SR<>I*epp5*w8tw?Jj+p-6p7k&0CwE z$#K&$eh$20Cx)63j%J-OJHX?>ItB+D@DAV7qaq85Xx`Nbr!G?)QM{HD+OioE)4!3i zrj{6n8{%5&m}lnh=&*OHJ$f4Bu*ypAf5PqWdV)`RQO34pGG|~-{Z#p3BEKl(my)JBL)pJTN#qGOjh++8QEjTVzkT!u!jcPTIxJ!*m zNvRS)91<+KwTCkJr9QYfTnJZjuI1)}sK#Wo+IM{^ChI7~S2r2f@nDogxQ#yNj!ik< zqMSDDYFLL1@g8l+aWvx9d3qqY`l9X#xie)QC0*$5teR31m9Q$$F?BAiBKj^Y263D& z6RYl(o2hQuyLBlIA#$hoQ3Iy!B}}p2)ik7hbKOJ{3%Sy4)zKqOCwZUc_NDD(&b*5v03QoA=ivEk~1#IkiE81`^-z%+0G0=s@RPU%PD>u^SXVC`>ymT{}4n*l+ zKSCFO)U#OU7p9v1*q*0pp{c6lxB=htFqPlZJ+?qbOB;MLaJdouY&gBR?BEP!-~)6O zh9>u-G_xkUssB!aAdV`x)^t75p4x6BZf-Qk~PCB+{@*M z3F4B+(SPkgnwpJKh5HNQOZj_T799gMmLe%hAzHeOnNRn;;DIkovNO2CqV|(|?Fj}l zD`$2#_hY(I^z{w3XTNY{2H0IqiIvC*tb!_pd&mZPN_{!EChMs)i*(CVal^jEur6vl zlsS}dcp$YNY<_lF+&kg1sz=tN$Dhyb=8Wv+ecmDGppB-;QUX#8u?dm)IJr+9K%3u) z(><2b@pd$-t7|;*;$3X5v1Z%(Q|ux?jnT3NqNC*Jnsy%(&X;j=n$|>fpot4mNxBHQ z2e>=U(JWY`fF$%x1zvN19YHc#U2h~)4JKMS7wR?c+p)jjsE&L7BQuz@itHQFr*z(0 zD);<}#mRTbmN!{J&qwwTvztPdV9*nx08EES<|q@YhXOoAo*xS7N8X!{nvLdE*7!(h z&9HDhX5bci_T+2cGsy>QCg}WONTwt(#_Y}p{({^w+83FQeJgbgEIhY&XXk}{zd&TI z9$YcbCLXzS?BPUF1{kLl7#)5jvSz}Hk#D0;A{}H{EGIG=LsbyUQZ>shs3Guoc7KSO3~ zS{tIfHEt5w<=WFIYy|&;PKz?GfM;C;zz`-45l=^zcY7v)lT7M`K)Ce>C7~I3`h{cA zUeumIZFf`+*~u@^)J^mFBLqdelY8`H2gmXq2mHpp@r#Q_W!StlHO5rs{v7FpFchdY z<8cZH6)bMX<$Onf&X<8%GqLOV zey=kn>m)i>@mu#HmPs@7IC%GZ_bMVk{v2ueG24C#A4kXfI;|M}Gaw$Rq{d5pcjn5? 
zerf5>#uhlYIvree-2#v%&=CION%;XV4tZW|+s09&ujUMF1Xo}0I?b`~es12TKh>Hgd@@Xi|`ln zI36fUn1BL~|8v&vly^DR3Zng?b9hRA}1gw7WiR+mUhgcpyg>ws;&6x~mgtK*4JDWnL@9vbutpB~w|DE3A=|EyJ~i*y`NS~euxMsf_NtHIty%Is764G=Y8`6&-Q zs;9;f&R1^F(m_#V*MG@_H6(JB_TU~-LtuBmbKp?hoMtYt7VxLb=Y0PZgO{Qo)h%?A0 z&_2O&3|F5?$5!LVibh4F`tF1*8tvBo@6 zUEyIxuy^ZsJMH`Hazl#MC41{z#Bu)qzJ3o3^HbD1u-vn0pr29uBVKwexTgd8rZzFHuG(N_7 zU3!uIthS*Be)IJEn+$jJ`2H8XJ*^V$LZw`oh}>H#d;IkLd9D75;W;+NkV2ks>yc>e zAn_D7C!q{kNol9c1=^P3^Wx6Bq`{3JqVjf~@J7xm-7KF``oJFBH378oEyo5ceoD`{ z7V@#ZGJC!P&WsSzoH5+uFLc47{^jKH^yP8BwmTT_9G*#v=oRypWYsoFb<8xup}`-h zXC66z6yuqCttMHEyWrqZy;Rt9dT;PKIsc3bEs#9>9pU(2UKU+?`jll>?Q8s;yRvZd z7!I28k&z`f&W>IYlEa$Hjg&Sp9#*H zZ&qe-6Oh4sE!@Kd)=A(021r_*@%dEK93nsak>ABzz~lEKZEYLk`}C+<+{9eJcjvJN zwgkT4Kd&C=8E}qc6_EL;-~wj#?LKcc_yDM;?ajjjz*<>wi)k`XYA?Z`7a%rILlp)R3L%~5Y~?$(8kl~LaG z)J*YJF)9r6G5T&TGsxoOfcD=H`S&3y{T^*;QmazgRmuFWRqs9z2JM|B2uZAF%QZBf z(d1S4<9Pr>e(yTEFtx?}oURqyW%bh85I^|O!>0y6DhoBi9n+W*4{X$YO&VMtyu(;i`*$R? zKZYuAAl)5?hY_1PRhK1&EvqYBm))m%H7GJ!UIiw#A?R`ZX)0Yp(|#5xoHaD z_Vw#D=QMc)Paq|TZA{xoxI2lTP_xD$)h;&nnyESeQRgcj)%AJ9lkJAn>KW=*X~zYk zh6&RnXm+8s2G9qkb_K2XA_;DVa#ME<&EpMM-3M=G044h1wj|%G0$ps(gY%)iX&0fz zp=pQ}^OxD)CP6*9Q`ChbK|6!jZ#gAPOg<{lI4f}~rC2gW zd_8$k7Zb`*S`Fq`kq+ONA_~MbBiuv8J^OALCx+oI5?vlT*#7a8U@Pechl(DPC=;^a zhljQJO}c)sooh;P-zdeAl~V7ZPjGRbEfuYMXPv-89Wv4cx~eFl%6(gGn(@UlqdJR6 zyW95NGOs$C?5Ro-#Ymdq#p*)S0JMgW3Q89aIK}<7gM%$zRQ2OUr}|8?4a4^EFCXCJ zB**6(U*T&tJl}hM%9%XLVZb`*+RyhV5|l1cC?xl$00^Qh+x+xV)W{#({OZv=_8c`b zl^yue4Z)(BcN5RaXv6s9yPRP3`}s;PC(UWEFJ>-Y4iRnN%KMbnTh1&j=KJhXYhYS?f6ia7RoZ<14F&9MSnz$)O z<%)%OzS`xb#jE%T*3QK#;G;#HTKJ7F4T;XOl&x83?)04a5&ow9)@SdgWhKGZBK>%=Q%rl=6;^wcT@qQ0X5(E*?>y)^T3ORrX1zMtG(f(v1tH`( zHw@BHwdA^3MqO6vILSESJq)=PTnb;i@O{uIh%U&)t|`tVUmgrGvE2UvA=T5`c>=D2 z3r!P}yf0F$V?(Uc9hDo8N{DGAs8=;d3=FyL9{5u_!=t`Dt(~C$%9vvB_AT~R%pSDcDv}xfZ&nv<`KxmL1818DY%W6mDv&Gw9CwfYmm;As-)G1UFBwMBJbE z3GHAN+ns&ROVw7;CkGOHhQd_l;59j5h~qKz!IDTC_`xl!LqqdAJ$$6T?2;BTi}z+trnjjx=-I%N}xAo%@q 
zAX87{iu@rRt`l)Fe(48g=P%Q^j?L38vU+X#Z#6ZsRdP=p>-^M!uFbL!CkmgTNpZ(q{uBPXqteDc_;dU3)Dz5NBgAc9DHCc*70KuS7;P1K;!HnLneza3&b9xo zOb1Rh3gI&!TnnxUcJum?%+Ir`E@KSt(eiw=32pm&YX@QPD50;>c%1z#4W6Hk{{rbi zB3q99r|)7p0>_tXNNh}dH#qfJ*rB?1OFQzk{vs#%WFHm?(!6wCVmd^!4Rw2lNV2P^ z6pPwX1-^J!=Feje`LY@8zJ*un#_%G*Ao(^Bk!Rq zehSRU@hX|dz1RRKo^XBk%j@vHRy(W$6l;3NfU8KdBLNUun!IqG?P`=VCG_!(4_Y{Tekqm71B--XA^9#WLZoOd@y;T8kSx(cBrFt>z`pF@~y7Vk^Q8$6-l zQf_an96rGqk5hV$Y-hf>%7Q3oKjR_7kRHj}yMetJ^{G?-b~r~8u~1w<~OmeWOOk}g@PSQXbbXDWqIabzC9HcPIv8MW9)yj_$?yY&hP6Q z9K8|NH;3YGEL~^Hwe{Jw1l!FpzGB=L6vAaR179=nDG9~o^p;V66sckpJ|hcTGbxn> zCL$-I2i$?SJh8apAA5ALgyJjH;-Y7YqW7PvJNXoBRA(ti2T3q)iv#@f44*0?>60qD}o{2K#*m1xeOFqpp{SzZxqvZTdfGA)o>V3=cBp&J?>c9+KQVjH54w)Bco`%-|k zTxTBIC6u=R(b&$=Ywb*w*!`ABo!nb9Z+5{zcTS$aSm))a0z)UPLb~EPdmkOM>~}g* z`A$jva9A+xCbX5K!*Mp2P?rDe@jxAv;HBcnKJKmi(N5fBYMuw;p8=u@LH+{=oYu4s zEz$+vZy-SvvbpCfufYMHZ{4ULS-P;Ci|Q7K1~*RBhrUjG3!UmW=3Y;DY1)cj_f8aV zygX-@q<%f29_D6QL&V0e{3sj6zXXcHazXpJLH7JIREnbA6Z7XSm^f6d#Hz>B$2wnz?|X&kiz!@bwb5dR)TD8E+w{(N@;z$2g$Nj!pL z>~j|w@OSXQM)WGenp%xM4?`<(C$O=h#=xmR8!)#gerGY>aYM znZ?N-3t@|;#Y%(4A2VH<&NkjBVi!EQwc(**KIdQL9G;=+EiqGpibik66_U-hT?Sq& zbVW1SY?ACh(YhpCWi{fC)i7>f8{DH@!`fxea1*|1!5k>iot>TEorxW&s#^l1+$2yi~<^9i9j@XgErpK9R& zh#BO7%JJ`+(*IF3)l2F!M|&A1yZfAUt2o_6StEPTYG=GfglFAMgmrUjbiCl^u_v5z zOIZ7LNZkeBxwS-BTB)oEp2nM^(%fnJLfHa%YJ)XFmytp?VWK)_?{h?tnn#zT&f@Q; zUgdN4zb2i_m)85u5DoBl;ilC8qw%aVpKx)Vh>HAT)xl4(DU-o1s|$^0@3RGVI#9drp_h7ZGKLm4HH7-BF4I<4ImRp!GX|kv6gqGi!E58B zMP;MNonuCiio;B(x`#NT&NOcsCXfWjA0AI6J9R5JKed2I#Mm-AN2!-LzO=oyv2t`~ zO)z_;NN-<%GiZ~1MAO>QVX&cjAT>E&zf>P@WGPExYCA(>QK9 zILPb>|For&Cb~LPcSvnZHjiYZ{&-ZQ1BV~W2ImLlJnPHttz}tGrb}8kMHCukS{-Y} zY>2%hIVFWAe&;V_jLP#CJlbs)BPE$>{V{L>Gd+EH9>GLL<$+Q@Qg4sGB|$Le{lv?+ zo@gd1FW0g)N(?CZdjD?eK+lBs#&c{Ug7`gX-vmGb-SH-}16lC~JOb~#ZhQ%Bz!iWK8L0=9ph|FNv( zYzSuuZR2{^A#GVK2eT#;d)Y(iYeXt$%sWmkZxyLH)#rCeY0smbQOc`ft&I(Bpst8q3$95?f zWtAi?&_nZ(>wdZ86|#mL&+K2ts2*%79<%1?rCYOsOm%k}FprUEX&>b4&Dk=4!_EDM 
zKiKXkxxKBA9{@3pJsSGLelD8o)VBGiCR$kO>zf!BRa(x5kOw%VOetr#Hg4@LFf(NC z%Gxpb3;)%DbV|v49W7oRRC@4!?1JNv>Uj|jd5dK^LV?}vu|O(ytO;FkaOj%3 zXzi)M<@Vf0-7(pmy26`GqK0$c{B!pj;oj&PdCqn^TtdXUIi@<$2A zy`I&KeSIYdaMfLFJp%V=#?^L!=`(DSV>elA(5AWEH+78A;*pyz5MgW3mOvhEYhFi8 z<&N6h?v<6A^90qyZ35n@MurzZrCHfcn`bi)0j-jL=dB4L)vaw9-K z(%LEp5fSZfGaaKoF>jz<3p> z;^Ig2bCX(cCTs^_iEdFI^Iz=+(294^f^iJ%OBAgnc zoivgL?;h7uL$YsKl<<}7;4lDTuewg5JFEZ1xVGO3T^aVwTPeCY<-w3R=4$@%nb!@r z{>!-?0zJb8`HjToYVK|d56e#&`cSm*9wrmjmf5+VO8aTBw(8S@aE1*RrWS&pw9ib- zqf`M6#d1S@fxWAvWYOI$sd09dZ6gU#mR=N0vMDwG_9WxRqH7qzLv>t3F>&(PM+=nm7j}D7)t7Q zoD8$Ob<`zNx!Z5GviTs~2gaI{%^yM?I0G_KX>n+BrTy0TuM%u?XRDyRvy1ojY1(T< zd|L{32sBeo-Za=Pk1S3E-7EAKv>4;JRm%Om zv0|A=z~2A{NZf5tFGzy2C@P*7NNzJ;%^IFGH6u2`MC@5big;mDHYksbQuMF(nI-Ucg3ow7pA zrIeL9@7^EsJO3yD{l{njpNc;ILG;PM`y9fC(n-x*c@#boguADSR}VkgJ|4g-l+%w5 zfdxdqh|c7>Cz|TsB`4;$m{pgD_?Vjy%|9x}oVZ?86e*cx_tTYq=#Z=!aEFw#?Z%Q6 z>gjK$Km-9(RMWHbThxrh=Y6`tNi*bUvxqW6d386Kbx#KgmLM_Z$DvmwMZ4D%i%E}M zT_F?kw~0ShwKS>4jt0s~^Q@c^q4Gg*#&JT8%oxrT_@~37XoIosd~zuF(aD61>f<}g zakr_(M!YFJil=80p>N@F2NPWi2@~3cJQ>T__Vnvt+ETvcAh$DZYR-*P#fgKKA(ROF zuH*TZATi0@-E9l5D5n|yJ-4Tr;MyCqeS4Du9`F}x()bQ0No7JS{IV};&UqZSGHWtn zJ%{Ud^Kfl#`L26v_^&{~7_dczHltR3p0K*LS)MYkZ?M7kpdWvyp0qB9i$(Yg#P`W* z9i5I2YQ0ghI`F7npuY5=rC=HXQ5ZFYvcNgiE-*MQMG6fQdQY5oY8`T-pJC8O3W_{t zOPP?MLnyaTiug1auY{8-)s7ik?PWHOJqbR@R-YSsXpTC@MPK%ua`YA*u)AUya0Wc$ z5uQj~S%szuZ>LtCS46)+gxoS_IQg_;DG#NoZvD1?%bEQ4+7z?+1){!4KmYj)#M%YF z$*T7Lmp5S0j;cSTFpdA~Da-)hsm02zyHelBRuTmEz(BOIv%4g~?W=u0?I&MIiWwLT z35Zz?4eq5kggHkYoT;F1tU_9;){|BQ(^o0_LqL{8pN|{y-&{1Fm9@VF{4V{0b%??09iJ?6$5%6nkRvx={<4QNyQt(`1Pv=MKbT;_z&@_OMJx;Ng%9ef9ohnEM6|=e)5|N-UKyV@dB4KFO}bPW}0QP0*P# znRsW;wKs_-&;0E3pKO_KsTsl3IZ!JTp)gxDG@T~v5U-g#@IcF$MpZo$ReN(Oe&Ko zY(z_$RENtsarP-MdYyohpN9kiVWkf#6qs;R=WNCIiAr;uAXI%}DGUQtzd${wi$T8R z#~jUdjwOiX*MKO3Q@u-bEN-O z_v1Mi4E-Hz=Jo;j{F_b}!+-_4kv-e@jp9bj4YLX@*saT6vpyOgPyE}2+9P-{qR}6* zVd{QhH^zAyW=~ZmCVh0Nfe3mRKSS>}HOe1(th3_@&0VhJ8fMoRpHsdcywZL^n=oyP zwvtxMZ8ETB*^c#GGtJQz`kaBlT9h$@z~j2H2KF|0G_n#d3? 
z@isEGnnd}uBCqnbIxzck^hZe7~F`V8ntipeK=;J@6})2V`3_b znMmDSfDGL(UUookrKK458`;%2gUMVp9B>71{;AV#Wy#Z>?g8{=pF$Q1fF5ijkIVGa zUm(G|V_=?&0I1Q1{X@n(VxLX<>N*IC#6QmJ!KU`vy!{0l0Kh-8KuMhHAGW25{G(7h z_@4$sY1#i|UG|R~CN})TURnOh1iN3wAI$%G!|ITK?p5|5EmYqB>$v{E@tvH7=m0AdP)ju^8o)PW3*fiI6B;Uv%LPZyk#>Hiisy)E6Sw^i%s z-z#hOG<((k=@d^$C)!75H2o~$@6Kqxt(OgBsky(q9_%>0p>?QVDEqni=)=TN_N`l7buPU7ijTm6?5Ew@QEKoR@MzW__Parv!;5X1=$RPT=ryOS{5|-;olxd zAg_pg;%Kf`Y3@##@9VH~RRYo_VkW;p&C)bxK}h#*Aa)A@{l8!v1^r*T_YAuT=v^-w z2;T5G!%X0&fN=z~Oe4vJL{XZ?~UFk@bDQK-rIBHc$U2rkVaT zX|w-SDvC%m=GF)3)uEBV0f+#>fN~cwdUP~1BOWbBF;_Ikix;Bd$&H6GuAUYtXF2F4 zs)nq)3w#YnV1KzIfhvpT^!qzHYoWQjNqO1S$wp?@)x+NutTwCaLxkuP6rawM#%X>@ zV3XE05J3d4MBpMA#bs~}rAEu)qNx&83LQ~9HfX7qrKX013UwTubXp72FEyj@=MSK) zo2#LXC-0k`TCX!N=ry-|JPRZ~eT-=7(7~zC4^a+%4_|z!hY*JxxlISNso|xWU+T&p1zVIqvPQBT=?_c*d2#$M3NC6PesO%QPjA(d-+qm= z&=3wg66!)Yf-(Y>lrYHKu$KBvDO|Bd8H+M0}5cue; zA<8chbmB6E;pB`_To{>^<%NZgzFI|kOIIl?)~uUI=R(%oBTRlV;A>=U@MygGskhY_ zOt?HTuz@dLv2ODL8t9_8Tj%a&$i$*%^vgPmWjT7czE}|IpgcMlS%1~uNrUgrknNtV zdh8aousS$t8~MPP$h%}V5nbJY$a}8h!P#!NOt?-s3;^{}@sz$!{d#^PVU5UK@@S?( z$PB$1=HiN*m|oS;NDLu1p7;3vB*B*^+$Zw})myIdc+>BIRJAesV5hZ%wl8|t;hFP7 zqeyp~Tlzx@^2KQb{f$M{8Y_;k`=@eqTJLcrIiR3R;0dcWa4Q5R&2^^aLkyxoUFAuW|swr%4^V{(!cFBM+xt6 z;Emb3$DgldzCw4yggpcRR~3W{lS-))*3GMkrT~ zaeVNRdNXR*VJzAw%n@SeHuVVinej9oEz9nnW2VI_h0TBqtP@rru^G*Lkirzp9(8w4 zX8b)uLj~X1D%xGI+hcj_9Rv|;<+aAbH+aJLQ9!oOf)>rWcPX&D8pjZyb5_5x){0}V z|IlHmWV)GaLtY9s_@h#$394&!KiIW}Ie&b12#LPK84t`GeY&QIM9gqJxBjF!$_3Zv zyNR-Ss(Z!KxR0Za6a6%nyvs=#9dusgc&iADQK)G~{uuM`rL5}3>q%VxEHE=tP0y++ zE)d5q3>p^)fA_2o%16Xub66_`b7jeuByAf@qCLf|{gSCi00liTlJ!oiD?z-zA`yZa zRx&hfZz@D_lE?Q(AWRim=AJXHiAwfdeHKoIl*?Yv9#W@Io&%~7j-#@Lx=J`FQhd%| zAQKn~+;3n4_?fNf=wq7c?%>tjpx{Foh3+Aq%>qCX3Gn9s_} zYH?I5g}tc0*V=8Bzx_sMcIO${KApVo!$vHr`m0wBxkBqF7(II=jY0AV@l||6%VxY0 zF>TI>EG>79@oYG2O|H4lmWJV`>tOg@s?6m zq9nb+#ffyfF%Bf7yTne{#Wgx zaVHBya>oJ@Me}pBKGoP>Y>2#QU7uO+b#-HHLC0FhG+qPn^3e1zkUIQmJvH_sHBWa( zAbv(-cM&0He@@>3udLJla#BXuS+^Y075J5u&Vh2GA9DY2#0J8+(yjbv1=r$jn$3$o 
zt{4y04yzgrURo;?dP4Nxv86Y0!Lzys^IWE-+g>dS{2MRq6}D@BT(-=c?Rs5NrvhE{ z6z*P~@uWwVo)?KFLq<$7Zq(@>l>6^*Zz~^|RMd)5QnMwE`iddY)gVL>h*xJ`a<}}R z>@F;2a11&qIac9=3P`L1Vd8u5&HZ(++M_7VAu zsrIe~@75J%_6TO%FZS3Q;6Z1VT?p>2U42jpT9iQYU2>7l&z)IK&AM-C8z`2M3U()* z*kY&#%xjwA!46nde$iIBUQEMxx4m4=g1)-qDvUGZz$HB0a6YLlej(1#=&_t|_#sr_ zm?XL;sm-I_wyrs}zgelF=0!slZMU%Av5LFk3R?JTxIo*ka-G&@BTRRFbTbNdtiup7bAlGUmS7a@M_b-|zXRU)&=+O$u;pUP{8 z6+ex$ZxpgE_iMp)X$f#xm|oP^+R>=_CdD?#4xM~B_jK=Bp(#ZM77C*rbN}iCftMVH zi@!i9*C%keQVr7Z2vVN@p~*FbIq$z+BMIEJ{=+UnpxZC7EWK)9^VzINkhRAM>YNXn zGQV-H;TIbGgInDKoq3&kS<-kKpcppgUbGty$`Cjs1Fu12EPMFu%5GhrUhep$3vD$T z)FnryT&e~?)(=RxB2~UiWN7qfp=LgC39iN!%890Q7ee_GIXPW>wCSS2ZWV41*V5yUOQM#hv#oA3?XmbfbE3*F zjDsJlXgZRMLy%Jik;jmbn?fww_%>0!8R1)-U7La6L`-$DZLpxbwdL!nf zKP%?k-E81Tgi z>A|r2X9Fx~NfbdjOC;n%aD zPXg!LKua|7F@rw1T(^um=ohbc(r1(GFHFF3T)vYu(YiMcr!^AOWmb((N(`siHRI~V zIBe~4OSq-z8sB6oP(&|v3E;^3`3VTpB#bP$8yeJytXvDd=WuCAE*CafKuYUAe@cLc z^X{oRty5V@6S_LC>^b5WD1LrSVFTXLzs#g^t*zM_lSY z#-1cm-w?sJh)+q4piXWoyiuWEKCkxWOX{2$Kh`?^E8BvyQs2+v#Ocl}9IS)f3|;va z_etL3GPE(Xcd&-Vj75W}dW*if&$dr0P)LM1_IM?1+>@|Y?3bs(xNBaDR(^-6i!M$)o;V@VlbuOar$xFpB0lYW zY!1c5Bi!88uC=W<`RM4Gor15e^z$f(o62ld)xq#R$mYc&5?PKI(?C8MDW~I+ESw$; z*(bN$l(`ohQn^N*`SF1(ZAhr7p4*n?pI5Lj97W+tmHEk&LaT?3 zK;tk20HuLU-daAsNbs~42tv-5BYvV80{Wq2I*>`J+hXd`gOJfAsL;TW!~OIL7KyJIGFg&NYJntBvZ3?x zl8v``I&@ObH8YO1?>QGw1+U*4UVTO4DvI5+DA>MyWVmxF(wxt*S5rfB+!_?gO#8IK zF)FQT=dvr@6R55g9kb_f^~oaZ>!K%DYmdYj(5eb!KdQ2wZ=^;Oon*O@)~7*exD{~6 z%{1P8Bbt{ldh<2?xk^}38@EP-Vd5GD)0c}WJo^|mt+31|jUazN&fQilwREEkBkU@m zMhp*~oWIq5w@YI(@)sye7hBYe&oWcL<2G9?+GEZ$-SZgGG?l0GJPg%&xzgjPp`KNCO2MU6V%3nC*2`MONTkAE%X zpW5jEcAT#*5$-Ds_vW84eQX!4Fk8LrzUq;y%>A6MC=BW$=8vDb(j=vEJYKW6YU1L= zo+-z9;GN98FD1m{H@K{QrNZ5{+6T2+i}3ShPg;~2{V?!?a3X7s-VII9HY{zG3hHRE zdt-vgYP%k7^&}3DlG|gaUbiPy=4XV;Of`P&y&HbNiU|B*y!$E@ZSXxV#zaS#I9wtO zVR&4*xkB3Bq^5gAQzbreV~Ol)xNZ7bIOq0kh-;P#%J4H!GKNv$8wr zW#|s-1NS?R8QiMPL%>(6-_eh8NM0PBLQZV05t=LcUtG9|wp}$zxyj5MLYTTf4nSDsuBF4swd-&56a@HX2qQ`3Aa+GZHV9ZCHCL-YqSEjdYOKEfZBi 
z{+wj29pf7=cyVksz!1QD6?fd!eRw~SPSkmJ9ik%F^T0`Cx3E0WPW|xQCa|_X!aGQ5 zJTtC;-?;zL5bvv;Ks}jdQsML7J6{20v*evGDYDt3G&tKT>B-YXbt$qFK~#D_xicYf zRQ_bVtVM~GJ!3{;A*4;pG4n-8Ka*u+hxsWlfNf=$_s?N@C(mSc)0J#O)h|Mvw79ri zb1m5r`D4OWBo;p6{mD4;;F3MLhN`t98h!r3o1Mw?rhqeKW0|M@EXQqN@=!#tF_Ao= zqM33pUG235oZs<{_8f2#*TI5*G|P`KnX5+#Vm;XD*l)e^dvLa|U0vgrB#|nSZFE=6 z;J<(Yz!Nmn2P5S|5}ulDm<+N?#zYFPu0M@_Ja*=3Ix1`bsE@5gm3D4$4U|0=qb1T6 zBbHCjVCIf?D18>doR;&501aGN-&zQ2$sxvRus zyLksOT%)VVs-@Z6y%Xr#Rhchd)EmNGN}4~&r=u8(BOtmB-~x4_5X?vLyDgvWs3Kgi zS@LRY-8YS_`1eR3RuzV8nb}dTcdfK<7`yL1yMMSw)i~G{8JQ97z}Ekv)_xNnsolFG zlpnpoKSq5!h&6XrvDQX^C5!g{imtCq)VM(jU&)7aR;@~#bE5`ovPRLnmU@x!Gm@&; zoFdePiAvNs*sBH@5iVMt%E5>gCAtm;?h&{yhvJLpk<)YZvzS{HRzh2z*XqZ7w!AOt z->?>#P|GakL}F40p`Y+?%;d$CZTq5(WJIWsZD>Cxa2>K7QL-id>ibebnb8O@f8=f%YKyYT#U!-fVQ9{Q@<(Neq8+djWhqEi_K&YyizqPCk{F-lug9Gd3~i$}_D zvucMpFBNWli(RjC`-?pu!=5FYss~_PdN`CXP}C5-JfZlv_D& z&g(_A{j9OXhXKV$HkJzuq=_v6qpoK!x&MfGi&G(5Sr^DNbD0Wv7X!ghyt zMpf`H=JE&h7+=H{s8O9B2e|3HtcNzy&jgg)duDRzxDoK75@VHk%=ntK<6-u6d18%@ z?afTK_7R_NWKwR?MeCMIkkqHVqLfe-kdJDp9s!-*k5{<%rPFDkAQ2f%FPv=`Pd$gv zJt`JcljN!IDRSrpil5~kKog6(U&VAp%k0wIp|BP9yz_WKy;4iTrjR@@`+N-kq^R#% z$N`2K3!n7Y%7i3$lQtGpwb$pWDffx?Rg0HONo+|d7_98q9$6ie$DLTX$iKaJQtU|H zDy$B297)%O=tFvd>_^W+%_QG!>8s7xpH$$Sx)qY;v4Sl0-K|k{}ik0^gWmKzpr_*H3zzVLy+iK{@^@NQ(XX*YE5!0>cD*QB= z)NF#=4x{JqAG#6B-y^Yq=o4qNZ$2c~{BY>PORI68XVs}EOX&p0Mjm5bjM}4G>sN=e zft|5m3N5pLWO;F_)y`RJLR7OvA-AxOqK6Vj*5&J)o#z=cS}rk89}W{|?Nb(M1XxHS zI`TO`BImiie}Qs%Uy4PNk#ub#!=B-1{FDPo2ROf1B>&fz#d97vL9wHhX8@(pIOLxy z^FL8{SpTU`|D3d-2>cIqdXPaD02cmTogQ}2^Z%dgv5<<&95z9%EdMoryodkF^d1@b{0KMWN_WpApsbfW708La79@srED#2DB;jS|{@BOTZyWZg zxLc!lXrH5`X{!{YSWU-9xG9Z&xYO~@Ksf;IF;_rNIwB+LlE~87;~qY{s@_BFQ&n5 zsc~JpKAg(n>`ccocJs!3p!k$Be!&S%AZ*Rk?UsYxqLK?Q^B7LMZL?2YVc0?{Gexk$E%F`?BaOUir$6PW<$H zeb;(vk+0}Aeu&U9Km|xceeH_~{!f6sL^hHn{v%||>F5%P`woz|*>Z1CVx(9pOQM?Y+UW|Z)W7;;EcxR){Rq?%(youRS%7Yr zMFX%lbw}>EYv~Ofsn;DXcS$YtCFHO*8AHtTb7`gvw{;=r)L(rPJk_US9^LSZGb0C-U(Mb;JImf{9|^q|e3z0wUr5p^ 
zd$a;a{t##BvtyaUa=o~5{1`I)S-kI2sL%_sRY3hZjyUUcXeD9X}D=(J8@J` z+ZyPe9a}IWDB(k?bPdTBz7BI)e?CGDSx>fU*p|&^tNYg-qRm;LR?sm96wzr#{!lh+x z$n`F|Ozbf5k{9&0Ih&=`cs@QX&1Qzgb%01SP(N=C#!f8F6y$VlTr8bHWPgpuoto z{{eyeI|}vp Date: Mon, 7 Aug 2023 14:53:30 +0300 Subject: [PATCH 12/32] Using a separate event cache file for each process (#6590) --- cvat/settings/base.py | 16 ++++++++++------ cvat/settings/testing.py | 5 ----- supervisord/server.conf | 3 ++- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/cvat/settings/base.py b/cvat/settings/base.py index 69e2c7f33d0..0664e6cd8e7 100644 --- a/cvat/settings/base.py +++ b/cvat/settings/base.py @@ -420,17 +420,21 @@ class CVAT_QUEUES(Enum): # Make sure to update other config files when updating these directories DATA_ROOT = os.path.join(BASE_DIR, 'data') -EVENTS_LOCAL_DB = os.path.join(DATA_ROOT, 'events.db') -os.makedirs(DATA_ROOT, exist_ok=True) -if not os.path.exists(EVENTS_LOCAL_DB): - open(EVENTS_LOCAL_DB, 'w').close() - MEDIA_DATA_ROOT = os.path.join(DATA_ROOT, 'data') os.makedirs(MEDIA_DATA_ROOT, exist_ok=True) CACHE_ROOT = os.path.join(DATA_ROOT, 'cache') os.makedirs(CACHE_ROOT, exist_ok=True) +EVENTS_LOCAL_DB_ROOT = os.path.join(CACHE_ROOT, 'events') +os.makedirs(EVENTS_LOCAL_DB_ROOT, exist_ok=True) +EVENTS_LOCAL_DB_FILE = os.path.join( + EVENTS_LOCAL_DB_ROOT, + os.getenv('CVAT_EVENTS_LOCAL_DB_FILENAME', 'events.db'), +) +if not os.path.exists(EVENTS_LOCAL_DB_FILE): + open(EVENTS_LOCAL_DB_FILE, 'w').close() + JOBS_ROOT = os.path.join(DATA_ROOT, 'jobs') os.makedirs(JOBS_ROOT, exist_ok=True) @@ -504,7 +508,7 @@ class CVAT_QUEUES(Enum): 'port': os.getenv('DJANGO_LOG_SERVER_PORT', 8282), 'version': 1, 'message_type': 'django', - 'database_path': EVENTS_LOCAL_DB, + 'database_path': EVENTS_LOCAL_DB_FILE, } }, 'loggers': { diff --git a/cvat/settings/testing.py b/cvat/settings/testing.py index 74703f86ad8..bba64d94dc3 100644 --- a/cvat/settings/testing.py +++ b/cvat/settings/testing.py @@ -18,11 +18,6 @@ DATA_ROOT = 
os.path.join(BASE_DIR, 'data') os.makedirs(DATA_ROOT, exist_ok=True) -EVENTS_LOCAL_DB = os.path.join(DATA_ROOT, 'events.db') -os.makedirs(DATA_ROOT, exist_ok=True) -if not os.path.exists(EVENTS_LOCAL_DB): - open(EVENTS_LOCAL_DB, 'w').close() - MEDIA_DATA_ROOT = os.path.join(DATA_ROOT, 'data') os.makedirs(MEDIA_DATA_ROOT, exist_ok=True) diff --git a/supervisord/server.conf b/supervisord/server.conf index 88707249f2f..5fba7a5e029 100644 --- a/supervisord/server.conf +++ b/supervisord/server.conf @@ -39,7 +39,8 @@ process_name=%(program_name)s-%(process_num)s socket=unix:///tmp/uvicorn.sock command=%(ENV_HOME)s/wait-for-it.sh %(ENV_CVAT_POSTGRES_HOST)s:5432 -t 0 -- python3 -m uvicorn --fd 0 --forwarded-allow-ips='*' cvat.asgi:application -environment=SSH_AUTH_SOCK="/tmp/ssh-agent.sock" +autorestart=true +environment=SSH_AUTH_SOCK="/tmp/ssh-agent.sock",CVAT_EVENTS_LOCAL_DB_FILENAME="events_%(process_num)03d.db" numprocs=%(ENV_NUMPROCS)s process_name=%(program_name)s-%(process_num)s stdout_logfile=/dev/stdout From a8e921bdd7d008f6b7a9a07430956a5fba6385f5 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Mon, 7 Aug 2023 18:59:12 +0300 Subject: [PATCH 13/32] Remove the YOLOv5 dependency from the YOLOv7 serverless function (#6619) For some reason, the YOLOv7 function uses `ultralytics/yolov5` as the base image, despite not depending on YOLOv5 in any way. Replace it with generic base images. 
--- serverless/onnx/WongKinYiu/yolov7/nuclio/function-gpu.yaml | 6 +++--- serverless/onnx/WongKinYiu/yolov7/nuclio/function.yaml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/serverless/onnx/WongKinYiu/yolov7/nuclio/function-gpu.yaml b/serverless/onnx/WongKinYiu/yolov7/nuclio/function-gpu.yaml index 45f410aeb7e..199c0d0c2f8 100644 --- a/serverless/onnx/WongKinYiu/yolov7/nuclio/function-gpu.yaml +++ b/serverless/onnx/WongKinYiu/yolov7/nuclio/function-gpu.yaml @@ -96,18 +96,18 @@ spec: eventTimeout: 30s build: image: cvat.onnx.wongkinyiu.yolov7 - baseImage: ultralytics/yolov5:latest + baseImage: nvidia/cuda:12.2.0-runtime-ubuntu22.04 directives: preCopy: - kind: USER value: root - kind: RUN - value: apt update && apt install --no-install-recommends -y libglib2.0-0 wget + value: apt update && apt install --no-install-recommends -y wget python3-pip - kind: WORKDIR value: /opt/nuclio - kind: RUN - value: pip install onnxruntime + value: pip install onnxruntime opencv-python-headless pillow pyyaml - kind: WORKDIR value: /opt/nuclio - kind: RUN diff --git a/serverless/onnx/WongKinYiu/yolov7/nuclio/function.yaml b/serverless/onnx/WongKinYiu/yolov7/nuclio/function.yaml index 328e2bbd64f..5642ce58fb1 100644 --- a/serverless/onnx/WongKinYiu/yolov7/nuclio/function.yaml +++ b/serverless/onnx/WongKinYiu/yolov7/nuclio/function.yaml @@ -95,16 +95,16 @@ spec: eventTimeout: 30s build: image: cvat.onnx.wongkinyiu.yolov7 - baseImage: ultralytics/yolov5:latest-cpu + baseImage: ubuntu:22.04 directives: preCopy: - kind: USER value: root - kind: RUN - value: apt update && apt install --no-install-recommends -y libglib2.0-0 && apt install wget + value: apt update && apt install --no-install-recommends -y wget python3-pip - kind: RUN - value: pip install onnxruntime + value: pip install onnxruntime opencv-python-headless pillow pyyaml - kind: WORKDIR value: /opt/nuclio - kind: RUN From c68cb077fa56ffa4b413970ff973ba24ea0a0e0b Mon Sep 17 00:00:00 2001 From: Roman 
Donchenko Date: Tue, 8 Aug 2023 12:29:50 +0300 Subject: [PATCH 14/32] Add documentation for auto-annotation in SDK/CLI (#6611) Documentation for #6483. --- cvat-sdk/README.md | 1 + site/content/en/docs/api_sdk/cli/_index.md | 44 +++- site/content/en/docs/api_sdk/sdk/_index.md | 6 + .../en/docs/api_sdk/sdk/auto-annotation.md | 220 ++++++++++++++++++ 4 files changed, 269 insertions(+), 2 deletions(-) create mode 100644 site/content/en/docs/api_sdk/sdk/auto-annotation.md diff --git a/cvat-sdk/README.md b/cvat-sdk/README.md index 8fa7b437974..cf5732aaeba 100644 --- a/cvat-sdk/README.md +++ b/cvat-sdk/README.md @@ -8,6 +8,7 @@ The SDK API includes several layers: - Server API wrappers (`ApiClient`). Located in at `cvat_sdk.api_client`. - High-level tools (`Core`). Located at `cvat_sdk.core`. - PyTorch adapter. Located at `cvat_sdk.pytorch`. +* Auto-annotation support. Located at `cvat_sdk.auto_annotation`. Package documentation is available [here](https://opencv.github.io/cvat/docs/api_sdk/sdk). diff --git a/site/content/en/docs/api_sdk/cli/_index.md b/site/content/en/docs/api_sdk/cli/_index.md index c44596a8983..83b4b269219 100644 --- a/site/content/en/docs/api_sdk/cli/_index.md +++ b/site/content/en/docs/api_sdk/cli/_index.md @@ -39,12 +39,12 @@ You can get help with `cvat-cli --help`. ``` usage: cvat-cli [-h] [--version] [--insecure] [--auth USER:[PASS]] [--server-host SERVER_HOST] [--server-port SERVER_PORT] [--organization SLUG] [--debug] - {create,delete,ls,frames,dump,upload,export,import} ... + {create,delete,ls,frames,dump,upload,export,import,auto-annotate} ... Perform common operations related to CVAT tasks. 
positional arguments: - {create,delete,ls,frames,dump,upload,export,import} + {create,delete,ls,frames,dump,upload,export,import,auto-annotate} options: -h, --help show this help message and exit @@ -230,3 +230,43 @@ by using the [label constructor](/docs/manual/basics/creating_an_annotation_task ```bash cvat-cli import task_backup.zip ``` + +### Auto-annotate + +This command provides a command-line interface +to the [auto-annotation API](/docs/api_sdk/sdk/auto-annotation). +To use it, create a Python module that implements the AA function protocol. + +In other words, this module must define the required attributes on the module level. +For example: + +```python +import cvat_sdk.auto_annotation as cvataa + +spec = cvataa.DetectionFunctionSpec(...) + +def detect(context, image): + ... +``` + +- Annotate the task with id 137 with the predefined YOLOv8 function: + ```bash + cvat-cli auto-annotate 137 --function-module cvat_sdk.auto_annotation.functions.yolov8n + ``` + +- Annotate the task with id 138 with an AA function defined in `my_func.py`: + ```bash + cvat-cli auto-annotate 138 --function-file path/to/my_func.py + ``` + +Note that this command does not modify the Python module search path. +If your function module needs to import other local modules, +you must add your module directory to the search path +if it isn't there already. + +- Annotate the task with id 139 with a function defined in the `my_func` module + located in the `my-project` directory, + letting it import other modules from that directory. + ```bash + PYTHONPATH=path/to/my-project cvat-cli auto-annotate 139 --function-module my_func + ``` diff --git a/site/content/en/docs/api_sdk/sdk/_index.md b/site/content/en/docs/api_sdk/sdk/_index.md index 1b9695ea7f2..eca76659b90 100644 --- a/site/content/en/docs/api_sdk/sdk/_index.md +++ b/site/content/en/docs/api_sdk/sdk/_index.md @@ -15,6 +15,7 @@ SDK API includes several layers: - Low-level API with REST API wrappers. Located at `cvat_sdk.api_client`. 
[Read more](/docs/api_sdk/sdk/lowlevel-api) - High-level API. Located at `cvat_sdk.core`. [Read more](/docs/api_sdk/sdk/highlevel-api) - PyTorch adapter. Located at `cvat_sdk.pytorch`. [Read more](/docs/api_sdk/sdk/pytorch-adapter) +- Auto-annotation API. Located at `cvat_sdk.auto_annotation.` [Read more](/docs/api_sdk/sdk/auto-annotation) In general, the low-level API provides single-request operations, while the high-level one implements composite, multi-request operations, and provides local proxies for server objects. @@ -25,6 +26,11 @@ The PyTorch adapter is a specialized layer that represents datasets stored in CVAT as PyTorch `Dataset` objects. This enables direct use of such datasets in PyTorch-based machine learning pipelines. +The auto-annotation API is a specialized layer +that lets you automatically annotate CVAT datasets +by running a custom function on the local machine. +See also the `auto-annotate` command in the CLI. + ## Installation To install an [official release of CVAT SDK](https://pypi.org/project/cvat-sdk/) use this command: diff --git a/site/content/en/docs/api_sdk/sdk/auto-annotation.md b/site/content/en/docs/api_sdk/sdk/auto-annotation.md new file mode 100644 index 00000000000..a8d9c1d79c2 --- /dev/null +++ b/site/content/en/docs/api_sdk/sdk/auto-annotation.md @@ -0,0 +1,220 @@ +--- +title: 'Auto-annotation API' +linkTitle: 'Auto-annotation API' +weight: 6 +--- + +## Overview + +This layer provides functionality that allows you to automatically annotate a CVAT dataset +by running a custom function on your local machine. +A function, in this context, is a Python object that implements a particular protocol +defined by this layer. +To avoid confusion with Python functions, +auto-annotation functions will be referred to as "AA functions" in the following text. +A typical AA function will be based on a machine learning model +and consist of the following basic elements: + +- Code to load the ML model. 
+ +- A specification describing the annotations that the AA function can produce. + +- Code to convert data from CVAT to a format the ML model can understand. + +- Code to run the ML model. + +- Code to convert resulting annotations to a format CVAT can understand. + +The layer can be divided into several parts: + +- The interface, containing the protocol that an AA function must implement. + +- The driver, containing functionality to annotate a CVAT dataset using an AA function. + +- The predefined AA function based on Ultralytics YOLOv8n. + +The `auto-annotate` CLI command provides a way to use an AA function from the command line +rather than from a Python program. +See [the CLI documentation](/docs/api_sdk/cli/) for details. + +## Example + +```python +from typing import List +import PIL.Image + +import torchvision.models + +from cvat_sdk import make_client +import cvat_sdk.models as models +import cvat_sdk.auto_annotation as cvataa + +class TorchvisionDetectionFunction: + def __init__(self, model_name: str, weights_name: str, **kwargs) -> None: + # load the ML model + weights_enum = torchvision.models.get_model_weights(model_name) + self._weights = weights_enum[weights_name] + self._transforms = self._weights.transforms() + self._model = torchvision.models.get_model(model_name, weights=self._weights, **kwargs) + self._model.eval() + + @property + def spec(self) -> cvataa.DetectionFunctionSpec: + # describe the annotations + return cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec(cat, i) + for i, cat in enumerate(self._weights.meta['categories']) + ] + ) + + def detect(self, context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + # convert the input into a form the model can understand + transformed_image = [self._transforms(image)] + + # run the ML model + results = self._model(transformed_image) + + # convert the results into a form CVAT can understand + return [ + cvataa.rectangle(label.item(), [x.item() for x in box]) + for 
result in results + for box, label in zip(result['boxes'], result['labels']) + ] + +# log into the CVAT server +with make_client(host="localhost", credentials=("user", "password")) as client: + # annotate task 12345 using Faster R-CNN + cvataa.annotate_task(client, 41617, + TorchvisionDetectionFunction("fasterrcnn_resnet50_fpn_v2", "DEFAULT", box_score_thresh=0.5), + ) +``` + +## Auto-annotation interface + +Currently, the only type of AA function supported by this layer is the detection function. +Therefore, all of the following information will pertain to detection functions. + +A detection function accepts an image and returns a list of shapes found in that image. +When it is applied to a dataset, the AA function is run for every image, +and the resulting lists of shapes are combined and uploaded to CVAT. + +A detection function must have two attributes, `spec` and `detect`. + +`spec` must contain the AA function's specification, +which is an instance of `DetectionFunctionSpec`. + +`DetectionFunctionSpec` must be initialized with a sequence of `PatchedLabelRequest` objects +that represent the labels that the AA function knows about. +See the docstring of `DetectionFunctionSpec` for more information on the constraints +that these objects must follow. + +`detect` must be a function/method accepting two parameters: + +- `context` (`DetectionFunctionContext`). + Contains information about the current image. + Currently `DetectionFunctionContext` only contains a single field, `frame_name`, + which contains the file name of the frame on the CVAT server. + +- `image` (`PIL.Image.Image`). + Contains image data. + +`detect` must return a list of `LabeledShapeRequest` objects, +representing shapes found in the image. +See the docstring of `DetectionFunctionSpec` for more information on the constraints +that these objects must follow. + +The same AA function may be used with any dataset that contain labels with the same name +as the AA function's specification. 
+The way it works is that the driver matches labels between the spec and the dataset, +and replaces the label IDs in the shape objects with those defined in the dataset. + +For example, suppose the AA function's spec defines the following labels: + +| Name | ID | +|-------|----| +| `bat` | 0 | +| `rat` | 1 | + +And the dataset defines the following labels: + +| Name | ID | +|-------|-----| +| `bat` | 100 | +| `cat` | 101 | +| `rat` | 102 | + +Then suppose `detect` returns a shape with `label_id` equal to 1. +The driver will see that it refers to the `rat` label, and replace it with 102, +since that's the ID this label has in the dataset. + +The same logic is used for sub-label IDs. + +### Helper factory functions + +The CVAT API model types used in the AA function protocol are somewhat unwieldy to work with, +so it's recommended to use the helper factory functions provided by this layer. +These helpers instantiate an object of their corresponding model type, +passing their arguments to the model constructor +and sometimes setting some attributes to fixed values. + +The following helpers are available for building specifications: + +| Name | Model type | Fixed attributes | +|-----------------------|-----------------------|-------------------| +| `label_spec` | `PatchedLabelRequest` | - | +| `skeleton_label_spec` | `PatchedLabelRequest` | `type="skeleton"` | +| `keypoint_spec` | `SublabelRequest` | - | + +The following helpers are available for use in `detect`: + +| Name | Model type | Fixed attributes | +|-------------|--------------------------|-------------------------------| +| `shape` | `LabeledShapeRequest` | `frame=0` | +| `rectangle` | `LabeledShapeRequest` | `frame=0`, `type="rectangle"` | +| `skeleton` | `LabeledShapeRequest` | `frame=0`, `type="skeleton"` | +| `keypoint` | `SubLabeledShapeRequest` | `frame=0`, `type="points"` | + +## Auto-annotation driver + +The `annotate_task` function uses an AA function to annotate a CVAT task. 
+It must be called as follows: + +```python +annotate_task(, , , ) +``` + +The supplied client will be used to make all API calls. + +By default, new annotations will be appended to the old ones. +Use `clear_existing=True` to remove old annotations instead. + +If a detection function declares a label that has no matching label in the task, +then by default, `BadFunctionError` is raised, and auto-annotation is aborted. +If you use `allow_unmatched_label=True`, then such labels will be ignored, +and any shapes referring to them will be dropped. +Same logic applies to sub-label IDs. + +`annotate_task` will raise a `BadFunctionError` exception +if it detects that the function violated the AA function protocol. + +## Predefined AA function + +This layer includes a predefined AA function based on the Ultralytics YOLOv8n model. +You can use this AA function as-is, or use it as a base on which to build your own. + +To use this function, you have to install CVAT SDK with the `ultralytics` extra: + +```console +$ pip install "cvat-sdk[ultralytics]" +``` + +The AA function is implemented as a module +in order to be compatible with the `cvat-cli auto-annotate` command. +Simply import `cvat_sdk.auto_annotation.functions.yolov8n` +and use the module itself as a function: + +```python +import cvat_sdk.auto_annotation.functions.yolov8n as yolov8n +annotate_task(, , yolov8n) +``` From f876cb6226826f265457d76137b2bf6e6fe63f16 Mon Sep 17 00:00:00 2001 From: Andrey Zhavoronkov Date: Wed, 9 Aug 2023 11:35:31 +0300 Subject: [PATCH 15/32] Optimized clickhouse query for `objects` metric (#6584) ### Motivation and context ### How has this been tested? 
### Checklist - [ ] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [ ] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- .../report/primary_metrics/objects.py | 44 ++++++------ cvat/requirements/base.in | 2 +- cvat/requirements/base.txt | 70 ++++++++++--------- cvat/requirements/development.txt | 8 +-- cvat/requirements/production.txt | 12 ++-- cvat/requirements/testing.txt | 6 +- 6 files changed, 76 insertions(+), 66 deletions(-) diff --git a/cvat/apps/analytics_report/report/primary_metrics/objects.py b/cvat/apps/analytics_report/report/primary_metrics/objects.py index d7d369ca146..8004fd2e9f9 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/objects.py +++ b/cvat/apps/analytics_report/report/primary_metrics/objects.py @@ -12,38 +12,42 @@ class JobObjects(PrimaryMetricBase): _default_view = ViewChoice.HISTOGRAM _key = "objects" # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here - _query = "SELECT toStartOfDay(timestamp) as day, sum(JSONLength(JSONExtractString(payload, {object_type:String}))) as s FROM events WHERE scope = {scope:String} AND job_id = {job_id:UInt64} GROUP BY day ORDER BY day ASC" + _query = "SELECT toStartOfDay(timestamp) as day, scope, sum(count) FROM events WHERE scope IN ({scopes:Array(String)}) AND job_id = {job_id:UInt64} GROUP BY scope, day ORDER BY day ASC" _granularity = GranularityChoice.DAY def calculate(self): statistics = {} - - for action in ["create", "update", "delete"]: - action_data = statistics.setdefault(f"{action}d", {}) - for obj_type in ["tracks", "shapes", "tags"]: - result = self._make_clickhouse_query( - { - "scope": f"{action}:{obj_type}", - "object_type": obj_type, - "job_id": self._db_obj.id, - } - ) - action_data[obj_type] = {entry[0]: entry[1] for entry in result.result_rows} + actions = ("create", "update", "delete") + obj_types = ("tracks", "shapes", "tags") + scopes = [f"{action}:{obj_type}" for action in actions for obj_type in obj_types] + for action in actions: + statistics[action] = {} + for obj_type in obj_types: + statistics[action][obj_type] = {} + + result = 
self._make_clickhouse_query( + { + "scopes": scopes, + "job_id": self._db_obj.id, + } + ) + + for day, scope, count in result.result_rows: + action, obj_type = scope.split(":") + statistics[action][obj_type][day] = count objects_statistics = self.get_empty() dates = set() - for action in ["created", "updated", "deleted"]: - for obj in ["tracks", "shapes", "tags"]: + for action in actions: + for obj in obj_types: dates.update(statistics[action][obj].keys()) - for action in ["created", "updated", "deleted"]: + for action in actions: for date in sorted(dates): - objects_statistics[action].append( + objects_statistics[f"{action}d"].append( { - "value": sum( - statistics[action][t].get(date, 0) for t in ["tracks", "shapes", "tags"] - ), + "value": sum(statistics[action][t].get(date, 0) for t in obj_types), "datetime": date.isoformat() + "Z", } ) diff --git a/cvat/requirements/base.in b/cvat/requirements/base.in index 7c773219336..32e152e1e91 100644 --- a/cvat/requirements/base.in +++ b/cvat/requirements/base.in @@ -58,7 +58,7 @@ dnspython==2.2.0 setuptools==65.5.1 django-health-check==3.17.0 psutil==5.9.4 -clickhouse-connect==0.5.10 +clickhouse-connect==0.6.8 django-crum==0.7.9 wheel>=0.38.0 # not directly required, pinned by Snyk to avoid a vulnerability psycopg2-binary==2.9.5 diff --git a/cvat/requirements/base.txt b/cvat/requirements/base.txt index 2f18e19e7dc..669962ee68e 100644 --- a/cvat/requirements/base.txt +++ b/cvat/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:d1435558d66ec49d0c691492b2f3798960ca3bba +# SHA1:47a46c0f57bd02f1446db65b3d107ca9e7927d76 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -12,7 +12,7 @@ absl-py==1.4.0 # via # tensorboard # tensorflow -asgiref==3.7.1 +asgiref==3.7.2 # via django astunparse==1.6.3 # via tensorflow @@ -23,7 +23,7 @@ attrs==21.4.0 # -r cvat/requirements/base.in # datumaro # jsonschema -azure-core==1.26.4 +azure-core==1.28.0 # via # azure-storage-blob # msrest @@ -35,9 +35,9 @@ botocore==1.20.112 # 
via # boto3 # s3transfer -cachetools==5.3.0 +cachetools==5.3.1 # via google-auth -certifi==2023.5.7 +certifi==2023.7.22 # via # clickhouse-connect # msrest @@ -50,17 +50,17 @@ click==8.1.3 # via # -r cvat/requirements/base.in # rq -clickhouse-connect==0.5.10 +clickhouse-connect==0.6.8 # via -r cvat/requirements/base.in -contourpy==1.0.7 +contourpy==1.1.0 # via matplotlib coreapi==2.3.3 # via -r cvat/requirements/base.in coreschema==0.0.4 # via coreapi -croniter==1.3.15 +croniter==1.4.1 # via rq-scheduler -cryptography==41.0.0 +cryptography==41.0.2 # via # azure-storage-blob # pyjwt @@ -72,7 +72,7 @@ defusedxml==0.7.1 # via # datumaro # python3-openid -deprecated==1.2.13 +deprecated==1.2.14 # via limits diskcache==5.4.0 # via -r cvat/requirements/base.in @@ -80,7 +80,7 @@ dj-pagination==2.5.0 # via -r cvat/requirements/base.in dj-rest-auth[with_social]==2.2.7 # via -r cvat/requirements/base.in -django==4.2.1 +django==4.2.3 # via # -r cvat/requirements/base.in # dj-rest-auth @@ -132,9 +132,9 @@ easyprocess==0.3 # pyunpack entrypoint2==1.1 # via pyunpack -flatbuffers==23.5.9 +flatbuffers==23.5.26 # via tensorflow -fonttools==4.39.4 +fonttools==4.41.1 # via matplotlib furl==2.1.0 # via -r cvat/requirements/base.in @@ -144,11 +144,11 @@ gitdb==4.0.10 # via gitpython gitpython==3.1.30 # via -r cvat/requirements/base.in -google-api-core==2.11.0 +google-api-core==2.11.1 # via # google-cloud-core # google-cloud-storage -google-auth==2.18.1 +google-auth==2.22.0 # via # google-api-core # google-auth-oauthlib @@ -157,7 +157,7 @@ google-auth==2.18.1 # tensorboard google-auth-oauthlib==0.4.6 # via tensorboard -google-cloud-core==2.3.2 +google-cloud-core==2.3.3 # via google-cloud-storage google-cloud-storage==1.42.0 # via -r cvat/requirements/base.in @@ -167,9 +167,9 @@ google-pasta==0.2.0 # via tensorflow google-resumable-media==2.5.0 # via google-cloud-storage -googleapis-common-protos==1.59.0 +googleapis-common-protos==1.59.1 # via google-api-core -grpcio==1.54.2 
+grpcio==1.56.2 # via # tensorboard # tensorflow @@ -180,7 +180,9 @@ h5py==3.6.0 # tensorflow idna==3.4 # via requests -importlib-resources==5.12.0 +importlib-metadata==6.8.0 + # via clickhouse-connect +importlib-resources==6.0.0 # via limits inflection==0.5.1 # via drf-spectacular @@ -200,11 +202,11 @@ keras==2.11.0 # via tensorflow kiwisolver==1.4.4 # via matplotlib -libclang==16.0.0 +libclang==16.0.6 # via tensorflow limits==3.5.0 # via python-logstash-async -lxml==4.9.2 +lxml==4.9.3 # via datumaro lz4==4.3.2 # via clickhouse-connect @@ -212,15 +214,15 @@ markdown==3.2.2 # via # -r cvat/requirements/base.in # tensorboard -markupsafe==2.1.2 +markupsafe==2.1.3 # via # jinja2 # werkzeug -matplotlib==3.7.1 +matplotlib==3.7.2 # via # datumaro # pycocotools -mistune==2.0.5 +mistune==3.0.1 # via -r cvat/requirements/base.in msrest==0.7.1 # via azure-storage-blob @@ -234,7 +236,7 @@ opt-einsum==3.3.0 # via tensorflow orderedmultidict==1.0.1 # via furl -orjson==3.8.13 +orjson==3.9.2 # via datumaro packaging==23.1 # via @@ -243,7 +245,7 @@ packaging==23.1 # nibabel # tensorboardx # tensorflow -pandas==2.0.1 +pandas==2.0.3 # via datumaro patool==1.12 # via -r cvat/requirements/base.in @@ -275,7 +277,7 @@ pycparser==2.21 # via cffi pygments==2.7.4 # via -r cvat/requirements/base.in -pyjwt[crypto]==2.7.0 +pyjwt[crypto]==2.8.0 # via django-allauth pylogbeat==2.0.0 # via python-logstash-async @@ -305,7 +307,7 @@ pytz==2020.1 # pandas pyunpack==0.2.1 # via -r cvat/requirements/base.in -pyyaml==6.0 +pyyaml==6.0.1 # via # datumaro # drf-spectacular @@ -345,7 +347,7 @@ rq-scheduler==0.10.0 # via -r cvat/requirements/base.in rsa==4.9 # via google-auth -ruamel-yaml==0.17.26 +ruamel-yaml==0.17.32 # via datumaro ruamel-yaml-clib==0.2.7 # via ruamel-yaml @@ -353,7 +355,7 @@ rules==3.3 # via -r cvat/requirements/base.in s3transfer==0.4.2 # via boto3 -scipy==1.10.1 +scipy==1.11.1 # via datumaro shapely==1.7.1 # via -r cvat/requirements/base.in @@ -390,7 +392,7 @@ 
tensorflow-io-gcs-filesystem==0.32.0 # via tensorflow termcolor==2.3.0 # via tensorflow -typing-extensions==4.6.2 +typing-extensions==4.7.1 # via # asgiref # azure-core @@ -410,9 +412,9 @@ urllib3==1.26.16 # clickhouse-connect # google-auth # requests -werkzeug==2.3.4 +werkzeug==2.3.6 # via tensorboard -wheel==0.40.0 +wheel==0.41.0 # via # -r cvat/requirements/base.in # astunparse @@ -421,6 +423,8 @@ wrapt==1.15.0 # via # deprecated # tensorflow +zipp==3.16.2 + # via importlib-metadata zstandard==0.21.0 # via clickhouse-connect diff --git a/cvat/requirements/development.txt b/cvat/requirements/development.txt index 65019acf8a5..61e3939f399 100644 --- a/cvat/requirements/development.txt +++ b/cvat/requirements/development.txt @@ -14,7 +14,7 @@ autopep8==2.0.2 # via django-silk black==23.3.0 # via -r cvat/requirements/development.in -dill==0.3.6 +dill==0.3.7 # via pylint django-extensions==3.0.8 # via -r cvat/requirements/development.in @@ -30,9 +30,9 @@ mccabe==0.7.0 # via pylint mypy-extensions==1.0.0 # via black -pathspec==0.11.1 +pathspec==0.11.2 # via black -platformdirs==3.5.1 +platformdirs==3.9.1 # via # black # pylint @@ -58,7 +58,7 @@ tomli==2.0.1 # autopep8 # black # pylint -tomlkit==0.11.8 +tomlkit==0.12.1 # via pylint tornado==6.3.2 # via snakeviz diff --git a/cvat/requirements/production.txt b/cvat/requirements/production.txt index 3899e828617..a903a1c2a63 100644 --- a/cvat/requirements/production.txt +++ b/cvat/requirements/production.txt @@ -1,4 +1,4 @@ -# SHA1:d3d4b2262fd87a700593e22be8811e6d04230e40 +# SHA1:784a6a811263fa11d49da152d9840f92b650d6fd # # This file is autogenerated by pip-compile-multi # To update, run: @@ -8,11 +8,15 @@ -r base.txt --no-binary av -anyio==3.6.2 +anyio==3.7.1 # via watchfiles +coverage==7.2.3 + # via -r cvat/requirements/production.in +exceptiongroup==1.1.2 + # via anyio h11==0.14.0 # via uvicorn -httptools==0.5.0 +httptools==0.6.0 # via uvicorn python-dotenv==1.0.0 # via uvicorn @@ -26,7 +30,5 @@ watchfiles==0.19.0 # via 
uvicorn websockets==11.0.3 # via uvicorn -coverage==7.2.3 - # via -r cvat/requirements/production.in # The following packages are considered to be unsafe in a requirements file: diff --git a/cvat/requirements/testing.txt b/cvat/requirements/testing.txt index a868cb1c6b0..7e2da3371e8 100644 --- a/cvat/requirements/testing.txt +++ b/cvat/requirements/testing.txt @@ -1,4 +1,4 @@ -# SHA1:910e8edd8fcfdbe7c9a7278ba499bcfad1313c19 +# SHA1:429cfd9ce2f6b66fbb7c898a5c6279d9d8a61335 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -8,11 +8,11 @@ -r development.txt --no-binary av +coverage==7.2.3 + # via -r cvat/requirements/testing.in fakeredis==2.10.3 # via -r cvat/requirements/testing.in sortedcontainers==2.4.0 # via fakeredis -coverage==7.2.3 - # via -r cvat/requirements/testing.in # The following packages are considered to be unsafe in a requirements file: From a2ec4080180d299a87b98b956d376a104764d408 Mon Sep 17 00:00:00 2001 From: Andrey Zhavoronkov Date: Wed, 9 Aug 2023 12:18:44 +0300 Subject: [PATCH 16/32] Bump SDK & CLI versions (#6576) --- cvat-cli/src/cvat_cli/version.py | 2 +- cvat-sdk/gen/generate.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cvat-cli/src/cvat_cli/version.py b/cvat-cli/src/cvat_cli/version.py index 84e6495dd8d..43921c2d67c 100644 --- a/cvat-cli/src/cvat_cli/version.py +++ b/cvat-cli/src/cvat_cli/version.py @@ -1 +1 @@ -VERSION = "2.5.0" +VERSION = "2.5.2" diff --git a/cvat-sdk/gen/generate.sh b/cvat-sdk/gen/generate.sh index 1b3e8bcb778..5bde94e68b0 100755 --- a/cvat-sdk/gen/generate.sh +++ b/cvat-sdk/gen/generate.sh @@ -8,7 +8,7 @@ set -e GENERATOR_VERSION="v6.0.1" -VERSION="2.5.0" +VERSION="2.5.2" LIB_NAME="cvat_sdk" LAYER1_LIB_NAME="${LIB_NAME}/api_client" DST_DIR="$(cd "$(dirname -- "$0")/.." 
&& pwd)" From fc642b9707057682218da77108b816964a188c98 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Wed, 9 Aug 2023 14:02:39 +0300 Subject: [PATCH 17/32] Support hidden source maps (#6634) ### Motivation and context Need to hide somehow proprietary code. If ``SOURCE_MAPS_TOKEN`` env token is specified during building cvat-ui module, it will hide the source maps as ``${SOURCE_MAPS_TOKEN}/assets/.map``. Since this token is hidden, the code is not available for anyone. ### How has this been tested? ### Checklist - [x] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- Dockerfile.ui | 5 ++++- cvat-ui/webpack.config.js | 8 +++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Dockerfile.ui b/Dockerfile.ui index 30bf27b68f7..a2e235b2a2a 100644 --- a/Dockerfile.ui +++ b/Dockerfile.ui @@ -3,6 +3,7 @@ FROM node:lts-slim AS cvat-ui ARG WA_PAGE_VIEW_HIT ARG UI_APP_CONFIG ARG CLIENT_PLUGINS +ARG SOURCE_MAPS_TOKEN ENV TERM=xterm \ LANG='C.UTF-8' \ @@ -27,7 +28,9 @@ COPY cvat-core/ /tmp/cvat-core/ COPY cvat-canvas3d/ /tmp/cvat-canvas3d/ COPY cvat-canvas/ /tmp/cvat-canvas/ COPY cvat-ui/ /tmp/cvat-ui/ -RUN CLIENT_PLUGINS="${CLIENT_PLUGINS}" UI_APP_CONFIG="${UI_APP_CONFIG}" yarn run build:cvat-ui +RUN CLIENT_PLUGINS="${CLIENT_PLUGINS}" \ +UI_APP_CONFIG="${UI_APP_CONFIG}" \ +SOURCE_MAPS_TOKEN="${SOURCE_MAPS_TOKEN}" yarn run build:cvat-ui FROM nginx:mainline-alpine # Replace default.conf configuration to remove unnecessary rules diff --git a/cvat-ui/webpack.config.js b/cvat-ui/webpack.config.js index f933c2cab7e..e44e20cc57f 100644 --- a/cvat-ui/webpack.config.js +++ b/cvat-ui/webpack.config.js @@ -8,6 +8,7 @@ */ const path = require('path'); +const webpack = require('webpack'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const Dotenv = require('dotenv-webpack'); const CopyPlugin = require('copy-webpack-plugin'); @@ -17,7 +18,8 @@ module.exports = (env) => { const defaultPlugins = ['plugins/sam_plugin']; const appConfigFile = process.env.UI_APP_CONFIG ? process.env.UI_APP_CONFIG : defaultAppConfig; const pluginsList = process.env.CLIENT_PLUGINS ? [...defaultPlugins, ...process.env.CLIENT_PLUGINS.split(':')] - .map((s) => s.trim()).filter((s) => !!s) : defaultPlugins + .map((s) => s.trim()).filter((s) => !!s) : defaultPlugins; + const sourceMapsToken = process.env.SOURCE_MAPS_TOKEN || ''; const transformedPlugins = pluginsList .filter((plugin) => !!plugin).reduce((acc, _path, index) => ({ @@ -209,6 +211,10 @@ module.exports = (env) => { }, ], }), + ...(sourceMapsToken ? 
[new webpack.SourceMapDevToolPlugin({ + append: '\n', + filename: `${sourceMapsToken}/[file].map`, + })] : []), ], } }; From 534de8b9f6bba500e5c33cf0f8f7faa66d5b80b6 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Wed, 9 Aug 2023 15:17:22 +0300 Subject: [PATCH 18/32] Improved frame decoder module (#6585) ### How has this been tested? ### Checklist - [x] I submit my changes into the `develop` branch - [x] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [x] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- .vscode/launch.json | 1 + CHANGELOG.md | 1 + cvat-canvas/package.json | 2 +- cvat-canvas/src/typescript/canvasModel.ts | 2 +- cvat-core/package.json | 3 +- cvat-core/src/api.ts | 9 +- cvat-core/src/frames.ts | 827 ++++++------------ cvat-core/src/organization.ts | 28 +- cvat-core/src/quality-settings.ts | 18 +- cvat-core/src/server-proxy.ts | 51 +- cvat-core/src/server-response-types.ts | 34 + cvat-core/src/session-implementation.ts | 14 + cvat-core/src/session.ts | 13 + cvat-core/tests/api/frames.js | 16 - cvat-core/tests/mocks/server-proxy.mock.js | 23 +- cvat-data/package.json | 2 +- cvat-data/src/ts/cvat-data.ts | 364 +++----- cvat-ui/package.json | 2 +- .../{sam_plugin => sam}/src/ts/index.tsx | 8 +- .../controls-side-bar/opencv-control.tsx | 16 +- .../header/settings-modal/player-settings.tsx | 344 ++++---- .../header/settings-modal/styles.scss | 28 +- .../settings-modal/workspace-settings.tsx | 29 +- cvat-ui/src/reducers/index.ts | 3 + cvat-ui/src/reducers/plugins-reducer.ts | 3 + cvat-ui/webpack.config.js | 2 +- yarn.lock | 12 +- 27 files changed, 785 insertions(+), 1070 deletions(-) rename cvat-ui/plugins/{sam_plugin => sam}/src/ts/index.tsx (97%) diff --git a/.vscode/launch.json b/.vscode/launch.json index 366bd49a3fa..4d0f09c5525 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -520,6 +520,7 @@ "type": "node", "request": "launch", "name": "jest debug", + "cwd": "${workspaceFolder}/cvat-core", "program": "${workspaceFolder}/node_modules/.bin/jest", "args": [ "--config", diff --git a/CHANGELOG.md b/CHANGELOG.md index 4e2e25a1e77..872136a260c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 () - \[SDK\] Custom `ProgressReporter` implementations should now override `start2` instead of `start` () +- Optimized memory using & code in decoding module () ### Deprecated diff --git a/cvat-canvas/package.json b/cvat-canvas/package.json index 
3205b986395..29b52cce824 100644 --- a/cvat-canvas/package.json +++ b/cvat-canvas/package.json @@ -1,6 +1,6 @@ { "name": "cvat-canvas", - "version": "2.17.2", + "version": "2.17.3", "description": "Part of Computer Vision Annotation Tool which presents its canvas library", "main": "src/canvas.ts", "scripts": { diff --git a/cvat-canvas/src/typescript/canvasModel.ts b/cvat-canvas/src/typescript/canvasModel.ts index aaa21282ea3..86e28b3fccb 100644 --- a/cvat-canvas/src/typescript/canvasModel.ts +++ b/cvat-canvas/src/typescript/canvasModel.ts @@ -569,7 +569,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { .catch((exception: any): void => { this.data.exception = exception; // don't notify when the frame is no longer needed - if (typeof exception !== 'number' || exception === this.data.imageID) { + if (typeof exception !== 'number') { this.notify(UpdateReasons.DATA_FAILED); } }); diff --git a/cvat-core/package.json b/cvat-core/package.json index 21eb64183a5..5556d187765 100644 --- a/cvat-core/package.json +++ b/cvat-core/package.json @@ -1,6 +1,6 @@ { "name": "cvat-core", - "version": "10.0.0", + "version": "10.0.1", "description": "Part of Computer Vision Tool which presents an interface for client-side integration", "main": "src/api.ts", "scripts": { @@ -27,7 +27,6 @@ "dependencies": { "@types/lodash": "^4.14.191", "axios": "^0.27.2", - "browser-or-node": "^2.0.0", "cvat-data": "link:./../cvat-data", "detect-browser": "^5.2.1", "error-stack-parser": "^2.0.2", diff --git a/cvat-core/src/api.ts b/cvat-core/src/api.ts index 4ff47db6ad1..9a99daa1f3f 100644 --- a/cvat-core/src/api.ts +++ b/cvat-core/src/api.ts @@ -326,16 +326,23 @@ function build() { cvat.server = Object.freeze(cvat.server); cvat.projects = Object.freeze(cvat.projects); cvat.tasks = Object.freeze(cvat.tasks); + cvat.assets = Object.freeze(cvat.assets); cvat.jobs = Object.freeze(cvat.jobs); + cvat.frames = Object.freeze(cvat.frames); cvat.users = Object.freeze(cvat.users); 
cvat.plugins = Object.freeze(cvat.plugins); cvat.lambda = Object.freeze(cvat.lambda); + // logger: todo: logger storage implemented other way + cvat.config = Object.freeze(cvat.config); cvat.client = Object.freeze(cvat.client); cvat.enums = Object.freeze(cvat.enums); + cvat.exceptions = Object.freeze(cvat.exceptions); cvat.cloudStorages = Object.freeze(cvat.cloudStorages); cvat.organizations = Object.freeze(cvat.organizations); + cvat.webhooks = Object.freeze(cvat.webhooks); cvat.analytics = Object.freeze(cvat.analytics); - cvat.frames = Object.freeze(cvat.frames); + cvat.storage = Object.freeze(cvat.storage); + cvat.classes = Object.freeze(cvat.classes); const implemented = Object.freeze(implementAPI(cvat)); return implemented; diff --git a/cvat-core/src/frames.ts b/cvat-core/src/frames.ts index b1ed902d3ea..158e3582114 100644 --- a/cvat-core/src/frames.ts +++ b/cvat-core/src/frames.ts @@ -3,28 +3,35 @@ // // SPDX-License-Identifier: MIT -import { isBrowser, isNode } from 'browser-or-node'; - -import * as cvatData from 'cvat-data'; -import { DimensionType } from 'enums'; +import _ from 'lodash'; +import { + FrameDecoder, BlockType, DimensionType, ChunkQuality, decodeContextImages, RequestOutdatedError, +} from 'cvat-data'; import PluginRegistry from './plugins'; import serverProxy, { RawFramesMetaData } from './server-proxy'; -import { - Exception, ArgumentError, DataError, ServerError, -} from './exceptions'; +import { Exception, ArgumentError, DataError } from './exceptions'; // frame storage by job id const frameDataCache: Record & { deleted_frames: Record }; chunkSize: number; mode: 'annotation' | 'interpolation'; startFrame: number; stopFrame: number; - provider: cvatData.FrameProvider; - frameBuffer: FrameBuffer; + decodeForward: boolean; + forwardStep: number; + latestFrameDecodeRequest: number | null; + latestContextImagesRequest: number | null; + provider: FrameDecoder; decodedBlocksCacheSize: number; - activeChunkRequest: null; - nextChunkRequest: null; 
+ activeChunkRequest: Promise | null; + activeContextRequest: Promise> | null; + contextCache: Record; + timestamp: number; + size: number; + }>; + getChunk: (chunkNumber: number, quality: ChunkQuality) => Promise; }> = {}; export class FramesMetaData { @@ -98,15 +105,20 @@ export class FramesMetaData { } export class FrameData { + public readonly filename: string; + public readonly width: number; + public readonly height: number; + public readonly number: number; + public readonly relatedFiles: number; + public readonly deleted: boolean; + public readonly jobID: number; + constructor({ width, height, name, jobID, frameNumber, - startFrame, - stopFrame, - decodeForward, deleted, related_files: relatedFiles, }) { @@ -125,7 +137,7 @@ export class FrameData { value: height, writable: false, }, - jid: { + jobID: { value: jobID, writable: false, }, @@ -137,18 +149,6 @@ export class FrameData { value: relatedFiles, writable: false, }, - startFrame: { - value: startFrame, - writable: false, - }, - stopFrame: { - value: stopFrame, - writable: false, - }, - decodeForward: { - value: decodeForward, - writable: false, - }, deleted: { value: deleted, writable: false, @@ -157,233 +157,167 @@ export class FrameData { ); } - async data(onServerRequest = () => {}) { + async data(onServerRequest = () => {}): Promise { const result = await PluginRegistry.apiWrapper.call(this, FrameData.prototype.data, onServerRequest); return result; } - - get imageData() { - return this._data.imageData; - } - - set imageData(imageData) { - this._data.imageData = imageData; - } } -FrameData.prototype.data.implementation = async function (onServerRequest) { - return new Promise((resolve, reject) => { - const resolveWrapper = (data) => { - this._data = { - imageData: data, - renderWidth: this.width, - renderHeight: this.height, - }; - return resolve(this._data); - }; +Object.defineProperty(FrameData.prototype.data, 'implementation', { + value(this: FrameData, onServerRequest) { + return new Promise<{ 
+ renderWidth: number; + renderHeight: number; + imageData: ImageBitmap | Blob; + } | Blob>((resolve, reject) => { + const { + provider, chunkSize, stopFrame, decodeForward, forwardStep, decodedBlocksCacheSize, + } = frameDataCache[this.jobID]; + + const requestId = +_.uniqueId(); + const chunkNumber = Math.floor(this.number / chunkSize); + const frame = provider.frame(this.number); + + function findTheNextNotDecodedChunk(searchFrom: number): number { + let firstFrameInNextChunk = searchFrom + forwardStep; + let nextChunkNumber = Math.floor(firstFrameInNextChunk / chunkSize); + while (nextChunkNumber === chunkNumber) { + firstFrameInNextChunk += forwardStep; + nextChunkNumber = Math.floor(firstFrameInNextChunk / chunkSize); + } - if (this._data) { - resolve(this._data); - return; - } + if (provider.isChunkCached(nextChunkNumber)) { + return findTheNextNotDecodedChunk(firstFrameInNextChunk); + } - const { provider } = frameDataCache[this.jid]; - const { chunkSize } = frameDataCache[this.jid]; - const start = parseInt(this.number / chunkSize, 10) * chunkSize; - const stop = Math.min(this.stopFrame, (parseInt(this.number / chunkSize, 10) + 1) * chunkSize - 1); - const chunkNumber = Math.floor(this.number / chunkSize); - - const onDecodeAll = async (frameNumber) => { - if ( - frameDataCache[this.jid].activeChunkRequest && - chunkNumber === frameDataCache[this.jid].activeChunkRequest.chunkNumber - ) { - const callbackArray = frameDataCache[this.jid].activeChunkRequest.callbacks; - for (let i = callbackArray.length - 1; i >= 0; --i) { - if (callbackArray[i].frameNumber === frameNumber) { - const callback = callbackArray[i]; - callbackArray.splice(i, 1); - callback.resolve(await provider.frame(callback.frameNumber)); + return nextChunkNumber; + } + + if (frame) { + if (decodeForward && decodedBlocksCacheSize > 1 && !frameDataCache[this.jobID].activeChunkRequest) { + const nextChunkNumber = findTheNextNotDecodedChunk(this.number); + const predecodeChunksMax = 
Math.floor(decodedBlocksCacheSize / 2); + if (nextChunkNumber * chunkSize <= stopFrame && + nextChunkNumber <= chunkNumber + predecodeChunksMax) { + provider.cleanup(1); + frameDataCache[this.jobID].activeChunkRequest = new Promise((resolveForward) => { + const releasePromise = (): void => { + resolveForward(); + frameDataCache[this.jobID].activeChunkRequest = null; + }; + + frameDataCache[this.jobID].getChunk( + nextChunkNumber, ChunkQuality.COMPRESSED, + ).then((chunk: ArrayBuffer) => { + provider.requestDecodeBlock( + chunk, + nextChunkNumber * chunkSize, + Math.min(stopFrame, (nextChunkNumber + 1) * chunkSize - 1), + () => {}, + releasePromise, + releasePromise, + ); + }).catch(() => { + releasePromise(); + }); + }); } } - if (callbackArray.length === 0) { - frameDataCache[this.jid].activeChunkRequest = null; - } + + resolve({ + renderWidth: this.width, + renderHeight: this.height, + imageData: frame, + }); + return; } - }; - const rejectRequestAll = () => { - if ( - frameDataCache[this.jid].activeChunkRequest && - chunkNumber === frameDataCache[this.jid].activeChunkRequest.chunkNumber - ) { - for (const r of frameDataCache[this.jid].activeChunkRequest.callbacks) { - r.reject(r.frameNumber); + onServerRequest(); + frameDataCache[this.jobID].latestFrameDecodeRequest = requestId; + (frameDataCache[this.jobID].activeChunkRequest || Promise.resolve()).finally(() => { + if (frameDataCache[this.jobID].latestFrameDecodeRequest !== requestId) { + // not relevant request anymore + reject(this.number); + return; } - frameDataCache[this.jid].activeChunkRequest = null; - } - }; - const makeActiveRequest = () => { - const taskDataCache = frameDataCache[this.jid]; - const activeChunk = taskDataCache.activeChunkRequest; - activeChunk.request = serverProxy.frames - .getData(this.jid, activeChunk.chunkNumber) - .then((chunk) => { - frameDataCache[this.jid].activeChunkRequest.completed = true; - if (!taskDataCache.nextChunkRequest) { - provider.requestDecodeBlock( - chunk, - 
taskDataCache.activeChunkRequest.start, - taskDataCache.activeChunkRequest.stop, - taskDataCache.activeChunkRequest.onDecodeAll, - taskDataCache.activeChunkRequest.rejectRequestAll, - ); - } - }) - .catch((exception) => { - if (exception instanceof Exception) { - reject(exception); - } else { - reject(new Exception(exception.message)); - } - }) - .finally(() => { - if (taskDataCache.nextChunkRequest) { - if (taskDataCache.activeChunkRequest) { - for (const r of taskDataCache.activeChunkRequest.callbacks) { - r.reject(r.frameNumber); - } - } - taskDataCache.activeChunkRequest = taskDataCache.nextChunkRequest; - taskDataCache.nextChunkRequest = null; - makeActiveRequest(); - } - }); - }; + // it might appear during decoding, so, check again + const currentFrame = provider.frame(this.number); + if (currentFrame) { + resolve({ + renderWidth: this.width, + renderHeight: this.height, + imageData: currentFrame, + }); + return; + } - if (isNode) { - resolve('Dummy data'); - } else if (isBrowser) { - provider - .frame(this.number) - .then((frame) => { - if (frame === null) { - onServerRequest(); - const activeRequest = frameDataCache[this.jid].activeChunkRequest; - if (!provider.isChunkCached(start, stop)) { - if ( - !activeRequest || - (activeRequest && - activeRequest.completed && - activeRequest.chunkNumber !== chunkNumber) - ) { - if (activeRequest && activeRequest.rejectRequestAll) { - activeRequest.rejectRequestAll(); - } - frameDataCache[this.jid].activeChunkRequest = { - request: null, - chunkNumber, - start, - stop, - onDecodeAll, - rejectRequestAll, - completed: false, - callbacks: [ - { - resolve: resolveWrapper, - reject, - frameNumber: this.number, - }, - ], - }; - makeActiveRequest(); - } else if (activeRequest.chunkNumber === chunkNumber) { - if (!activeRequest.onDecodeAll && !activeRequest.rejectRequestAll) { - activeRequest.onDecodeAll = onDecodeAll; - activeRequest.rejectRequestAll = rejectRequestAll; - } - activeRequest.callbacks.push({ - resolve: 
resolveWrapper, - reject, - frameNumber: this.number, - }); - } else { - if (frameDataCache[this.jid].nextChunkRequest) { - const { callbacks } = frameDataCache[this.jid].nextChunkRequest; - for (const r of callbacks) { - r.reject(r.frameNumber); - } - } - frameDataCache[this.jid].nextChunkRequest = { - request: null, - chunkNumber, - start, - stop, - onDecodeAll, - rejectRequestAll, - completed: false, - callbacks: [ - { - resolve: resolveWrapper, - reject, - frameNumber: this.number, - }, - ], - }; - } - } else { - activeRequest.callbacks.push({ - resolve: resolveWrapper, - reject, - frameNumber: this.number, - }); - provider.requestDecodeBlock(null, start, stop, onDecodeAll, rejectRequestAll); - } - } else { - if ( - this.number % chunkSize > chunkSize / 4 && - provider.decodedBlocksCacheSize > 1 && - this.decodeForward && - !provider.isNextChunkExists(this.number) - ) { - const nextChunkNumber = Math.floor(this.number / chunkSize) + 1; - if (nextChunkNumber * chunkSize < this.stopFrame) { - provider.setReadyToLoading(nextChunkNumber); - const nextStart = nextChunkNumber * chunkSize; - const nextStop = Math.min(this.stopFrame, (nextChunkNumber + 1) * chunkSize - 1); - if (!provider.isChunkCached(nextStart, nextStop)) { - if (!frameDataCache[this.jid].activeChunkRequest) { - frameDataCache[this.jid].activeChunkRequest = { - request: null, - chunkNumber: nextChunkNumber, - start: nextStart, - stop: nextStop, - onDecodeAll: null, - rejectRequestAll: null, - completed: false, - callbacks: [], - }; - makeActiveRequest(); - } - } else { - provider.requestDecodeBlock(null, nextStart, nextStop, null, null); - } - } + frameDataCache[this.jobID].activeChunkRequest = new Promise(( + resolveLoadAndDecode, + ) => { + let wasResolved = false; + frameDataCache[this.jobID].getChunk( + chunkNumber, ChunkQuality.COMPRESSED, + ).then((chunk: ArrayBuffer) => { + try { + provider + .requestDecodeBlock( + chunk, + chunkNumber * chunkSize, + Math.min(stopFrame, (chunkNumber + 1) * 
chunkSize - 1), + (_frame: number, bitmap: ImageBitmap | Blob) => { + if (decodeForward) { + // resolve immediately only if is not playing + return; + } + + if (frameDataCache[this.jobID].latestFrameDecodeRequest === requestId && + this.number === _frame + ) { + wasResolved = true; + resolve({ + renderWidth: this.width, + renderHeight: this.height, + imageData: bitmap, + }); + } + }, () => { + frameDataCache[this.jobID].activeChunkRequest = null; + resolveLoadAndDecode(); + const decodedFrame = provider.frame(this.number); + if (decodeForward) { + // resolve after decoding everything if playing + resolve({ + renderWidth: this.width, + renderHeight: this.height, + imageData: decodedFrame, + }); + } else if (!wasResolved) { + reject(this.number); + } + }, (error: Error | RequestOutdatedError) => { + frameDataCache[this.jobID].activeChunkRequest = null; + resolveLoadAndDecode(); + if (error instanceof RequestOutdatedError) { + reject(this.number); + } else { + reject(error); + } + }, + ); + } catch (error) { + reject(error); } - resolveWrapper(frame); - } - }) - .catch((exception) => { - if (exception instanceof Exception) { - reject(exception); - } else { - reject(new Exception(exception.message)); - } + }).catch((error) => { + reject(error); + resolveLoadAndDecode(error); + }); }); - } - }); -}; + }); + }); + }, + writable: false, +}); function getFrameMeta(jobID, frame): RawFramesMetaData['frames'][0] { const { meta, mode, startFrame } = frameDataCache[jobID]; @@ -403,286 +337,83 @@ function getFrameMeta(jobID, frame): RawFramesMetaData['frames'][0] { return frameMeta; } -class FrameBuffer { - constructor(size, chunkSize, stopFrame, jobID) { - this._size = size; - this._buffer = {}; - this._contextImage = {}; - this._requestedChunks = {}; - this._chunkSize = chunkSize; - this._stopFrame = stopFrame; - this._activeFillBufferRequest = false; - this._jobID = jobID; - } - - addContextImage(frame, data): void { - const promise = new Promise((resolve, reject) => { - 
data.then((resolvedData) => { - const meta = getFrameMeta(this._jobID, frame); - return cvatData - .decodeZip(resolvedData, 0, meta.related_files, cvatData.DimensionType.DIMENSION_2D); - }).then((decodedData) => { - this._contextImage[frame] = decodedData; - resolve(); - }).catch((error: Error) => { - if (error instanceof ServerError && (error as any).code === 404) { - this._contextImage[frame] = {}; - resolve(); - } else { - reject(error); - } - }); - }); - - this._contextImage[frame] = promise; - } - - isContextImageAvailable(frame): boolean { - return frame in this._contextImage; - } - - getContextImage(frame): Promise { - return new Promise((resolve) => { - if (frame in this._contextImage) { - if (this._contextImage[frame] instanceof Promise) { - this._contextImage[frame].then(() => { - resolve(this.getContextImage(frame)); - }); - } else { - resolve({ ...this._contextImage[frame] }); - } - } else { - resolve([]); - } - }); - } - - getFreeBufferSize() { - let requestedFrameCount = 0; - for (const chunk of Object.values(this._requestedChunks)) { - requestedFrameCount += chunk.requestedFrames.size; +export function getContextImage(jobID: number, frame: number): Promise> { + return new Promise>((resolve, reject) => { + if (!(jobID in frameDataCache)) { + reject(new Error( + 'Frame data was not initialized for this job. 
Try first requesting any frame.', + )); } + const frameData = frameDataCache[jobID]; + const requestId = frame; + const { startFrame } = frameData; + const { related_files: relatedFiles } = frameData.meta.frames[frame - startFrame]; + + if (relatedFiles === 0) { + resolve({}); + } else if (frame in frameData.contextCache) { + resolve(frameData.contextCache[frame].data); + } else { + frameData.latestContextImagesRequest = requestId; + const executor = (): void => { + if (frameData.latestContextImagesRequest !== requestId) { + reject(frame); + } else if (frame in frameData.contextCache) { + resolve(frameData.contextCache[frame].data); + } else { + frameData.activeContextRequest = serverProxy.frames.getImageContext(jobID, frame) + .then((encodedImages) => decodeContextImages(encodedImages, 0, relatedFiles)); + frameData.activeContextRequest.then((images) => { + const size = Object.values(images) + .reduce((acc, image) => acc + image.width * image.height * 4, 0); + const totalSize = Object.values(frameData.contextCache) + .reduce((acc, item) => acc + item.size, 0); + if (totalSize > 512 * 1024 * 1024) { + const [leastTimestampFrame] = Object.entries(frameData.contextCache) + .sort(([, item1], [, item2]) => item1.timestamp - item2.timestamp)[0]; + delete frameData.contextCache[leastTimestampFrame]; + } - return this._size - Object.keys(this._buffer).length - requestedFrameCount; - } - - requestOneChunkFrames(chunkIdx) { - return new Promise((resolve, reject) => { - this._requestedChunks[chunkIdx] = { - ...this._requestedChunks[chunkIdx], - resolve, - reject, - }; - for (const frame of this._requestedChunks[chunkIdx].requestedFrames.entries()) { - const requestedFrame = frame[1]; - const frameMeta = getFrameMeta(this._jobID, requestedFrame); - const frameData = new FrameData({ - ...frameMeta, - jobID: this._jobID, - frameNumber: requestedFrame, - startFrame: frameDataCache[this._jobID].startFrame, - stopFrame: frameDataCache[this._jobID].stopFrame, - decodeForward: 
false, - deleted: requestedFrame in frameDataCache[this._jobID].meta, - }); + frameData.contextCache[frame] = { + data: images, + timestamp: Date.now(), + size, + }; - frameData - .data() - .then(() => { - if ( - !(chunkIdx in this._requestedChunks) || - !this._requestedChunks[chunkIdx].requestedFrames.has(requestedFrame) - ) { - reject(chunkIdx); + if (frameData.latestContextImagesRequest !== requestId) { + reject(frame); } else { - this._requestedChunks[chunkIdx].requestedFrames.delete(requestedFrame); - this._requestedChunks[chunkIdx].buffer[requestedFrame] = frameData; - if (this._requestedChunks[chunkIdx].requestedFrames.size === 0) { - const bufferedframes = Object.keys(this._requestedChunks[chunkIdx].buffer).map( - (f) => +f, - ); - this._requestedChunks[chunkIdx].resolve(new Set(bufferedframes)); - } + resolve(images); } - }) - .catch(() => { - reject(chunkIdx); + }).finally(() => { + frameData.activeContextRequest = null; }); - } - }); - } - - fillBuffer(startFrame, frameStep = 1, count = null) { - const freeSize = this.getFreeBufferSize(); - const requestedFrameCount = count ? 
count * frameStep : freeSize * frameStep; - const stopFrame = Math.min(startFrame + requestedFrameCount, this._stopFrame + 1); - - for (let i = startFrame; i < stopFrame; i += frameStep) { - const chunkIdx = Math.floor(i / this._chunkSize); - if (!(chunkIdx in this._requestedChunks)) { - this._requestedChunks[chunkIdx] = { - requestedFrames: new Set(), - resolve: null, - reject: null, - buffer: {}, - }; - } - this._requestedChunks[chunkIdx].requestedFrames.add(i); - } - - let bufferedFrames = new Set(); - - // if we send one request to get frame 1 with filling the buffer - // then quicky send one more request to get frame 1 - // frame 1 will be already decoded and written to buffer - // the second request gets frame 1 from the buffer, removes it from there and returns - // after the first request finishes decoding it tries to get frame 1, but failed - // because frame 1 was already removed from the buffer by the second request - // to prevent this behavior we do not write decoded frames to buffer till the end of decoding all chunks - const buffersToBeCommited = []; - const commitBuffers = () => { - for (const buffer of buffersToBeCommited) { - this._buffer = { - ...this._buffer, - ...buffer, - }; - } - }; + } + }; - // Need to decode chunks in sequence - // eslint-disable-next-line no-async-promise-executor - return new Promise(async (resolve, reject) => { - for (const chunkIdx of Object.keys(this._requestedChunks)) { - try { - const chunkFrames = await this.requestOneChunkFrames(chunkIdx); - if (chunkIdx in this._requestedChunks) { - bufferedFrames = new Set([...bufferedFrames, ...chunkFrames]); - - buffersToBeCommited.push(this._requestedChunks[chunkIdx].buffer); - delete this._requestedChunks[chunkIdx]; - if (Object.keys(this._requestedChunks).length === 0) { - commitBuffers(); - resolve(bufferedFrames); - } + if (!frameData.activeContextRequest) { + executor(); + } else { + const checkAndExecute = (): void => { + if (frameData.activeContextRequest) { + // if we 
just execute in finally + // it might raise multiple server requests for context images + // if the promise was pending before and several requests came for the same frame + // all these requests will stuck on "finally" + // and when the promise fullfilled, it will run all the microtasks + // since they all have the same request id, all they will perform in executor() + frameData.activeContextRequest.finally(() => setTimeout(checkAndExecute)); } else { - commitBuffers(); - reject(chunkIdx); - break; + executor(); } - } catch (error) { - commitBuffers(); - reject(error); - break; - } - } - }); - } - - async makeFillRequest(start, step, count = null) { - if (!this._activeFillBufferRequest) { - this._activeFillBufferRequest = true; - try { - await this.fillBuffer(start, step, count); - this._activeFillBufferRequest = false; - } catch (error) { - if (typeof error === 'number' && error in this._requestedChunks) { - this._activeFillBufferRequest = false; - } - throw error; - } - } - } - - async require(frameNumber: number, jobID: number, fillBuffer: boolean, frameStep: number): FrameData { - for (const frame in this._buffer) { - if (+frame < frameNumber || +frame >= frameNumber + this._size * frameStep) { - delete this._buffer[frame]; - } - } - - this._required = frameNumber; - const frameMeta = getFrameMeta(jobID, frameNumber); - let frame = new FrameData({ - ...frameMeta, - jobID, - frameNumber, - startFrame: frameDataCache[jobID].startFrame, - stopFrame: frameDataCache[jobID].stopFrame, - decodeForward: !fillBuffer, - deleted: frameNumber in frameDataCache[jobID].meta.deleted_frames, - }); - - if (frameNumber in this._buffer) { - frame = this._buffer[frameNumber]; - delete this._buffer[frameNumber]; - const cachedFrames = this.cachedFrames(); - if ( - fillBuffer && - !this._activeFillBufferRequest && - this._size > this._chunkSize && - cachedFrames.length < (this._size * 3) / 4 - ) { - const maxFrame = cachedFrames ? 
Math.max(...cachedFrames) : frameNumber; - if (maxFrame < this._stopFrame) { - this.makeFillRequest(maxFrame + 1, frameStep).catch((e) => { - if (e !== 'not needed') { - throw e; - } - }); - } - } - } else if (fillBuffer) { - this.clear(); - await this.makeFillRequest(frameNumber, frameStep, fillBuffer ? null : 1); - frame = this._buffer[frameNumber]; - } else { - this.clear(); - } - - return frame; - } + }; - clear() { - for (const chunkIdx in this._requestedChunks) { - if ( - Object.prototype.hasOwnProperty.call(this._requestedChunks, chunkIdx) && - this._requestedChunks[chunkIdx].reject - ) { - this._requestedChunks[chunkIdx].reject('not needed'); + setTimeout(checkAndExecute); } } - this._activeFillBufferRequest = false; - this._requestedChunks = {}; - this._buffer = {}; - } - - cachedFrames() { - return Object.keys(this._buffer).map((f) => +f); - } -} - -async function getImageContext(jobID, frame) { - return new Promise((resolve, reject) => { - serverProxy.frames - .getImageContext(jobID, frame) - .then((result) => { - resolve(result); - }) - .catch((error) => { - reject(error); - }); }); } -export async function getContextImage(jobID, frame) { - if (frameDataCache[jobID].frameBuffer.isContextImageAvailable(frame)) { - return frameDataCache[jobID].frameBuffer.getContextImage(frame); - } - const response = getImageContext(jobID, frame); - await frameDataCache[jobID].frameBuffer.addContextImage(frame, response); - return frameDataCache[jobID].frameBuffer.getContextImage(frame); -} - export function decodePreview(preview: Blob): Promise { return new Promise((resolve, reject) => { const reader = new FileReader(); @@ -707,60 +438,73 @@ export async function getFrame( isPlaying: boolean, step: number, dimension: DimensionType, -) { + getChunk: (chunkNumber: number, quality: ChunkQuality) => Promise, +): Promise { if (!(jobID in frameDataCache)) { - const blockType = chunkType === 'video' ? 
cvatData.BlockType.MP4VIDEO : cvatData.BlockType.ARCHIVE; + const blockType = chunkType === 'video' ? BlockType.MP4VIDEO : BlockType.ARCHIVE; const meta = await serverProxy.frames.getMeta('job', jobID); - meta.deleted_frames = Object.fromEntries(meta.deleted_frames.map((_frame) => [_frame, true])); - const mean = meta.frames.reduce((a, b) => a + b.width * b.height, 0) / meta.frames.length; + const updatedMeta = { + ...meta, + deleted_frames: Object.fromEntries(meta.deleted_frames.map((_frame) => [_frame, true])), + }; + const mean = updatedMeta.frames.reduce((a, b) => a + b.width * b.height, 0) / updatedMeta.frames.length; const stdDev = Math.sqrt( - meta.frames.map((x) => (x.width * x.height - mean) ** 2).reduce((a, b) => a + b) / - meta.frames.length, + updatedMeta.frames.map((x) => (x.width * x.height - mean) ** 2).reduce((a, b) => a + b) / + updatedMeta.frames.length, ); // limit of decoded frames cache by 2GB - const decodedBlocksCacheSize = Math.floor(2147483648 / (mean + stdDev) / 4 / chunkSize) || 1; - + const decodedBlocksCacheSize = Math.min( + Math.floor((2048 * 1024 * 1024) / ((mean + stdDev) * 4 * chunkSize)) || 1, 10, + ); frameDataCache[jobID] = { - meta, + meta: updatedMeta, chunkSize, mode, startFrame, stopFrame, - provider: new cvatData.FrameProvider( + decodeForward: isPlaying, + forwardStep: step, + provider: new FrameDecoder( blockType, chunkSize, - Math.max(decodedBlocksCacheSize, 9), decodedBlocksCacheSize, - 1, dimension, ), - frameBuffer: new FrameBuffer( - Math.min(180, decodedBlocksCacheSize * chunkSize), - chunkSize, - stopFrame, - jobID, - ), decodedBlocksCacheSize, activeChunkRequest: null, - nextChunkRequest: null, + activeContextRequest: null, + latestFrameDecodeRequest: null, + latestContextImagesRequest: null, + contextCache: {}, + getChunk, }; - - const frameMeta = getFrameMeta(jobID, frame); - frameDataCache[jobID].provider.setRenderSize(frameMeta.width, frameMeta.height); } - return 
frameDataCache[jobID].frameBuffer.require(frame, jobID, isPlaying, step); + const frameMeta = getFrameMeta(jobID, frame); + frameDataCache[jobID].provider.setRenderSize(frameMeta.width, frameMeta.height); + frameDataCache[jobID].decodeForward = isPlaying; + frameDataCache[jobID].forwardStep = step; + + return new FrameData({ + width: frameMeta.width, + height: frameMeta.height, + name: frameMeta.name, + related_files: frameMeta.related_files, + frameNumber: frame, + deleted: frame in frameDataCache[jobID].meta.deleted_frames, + jobID, + }); } -export async function getDeletedFrames(instanceType, id) { +export async function getDeletedFrames(instanceType: 'job' | 'task', id) { if (instanceType === 'job') { const { meta } = frameDataCache[id]; return meta.deleted_frames; } if (instanceType === 'task') { - const meta = await serverProxy.frames.getMeta('job', id); + const meta = await serverProxy.frames.getMeta('task', id); meta.deleted_frames = Object.fromEntries(meta.deleted_frames.map((_frame) => [_frame, true])); return meta; } @@ -768,19 +512,19 @@ export async function getDeletedFrames(instanceType, id) { throw new Exception(`getDeletedFrames is not implemented for ${instanceType}`); } -export function deleteFrame(jobID, frame) { +export function deleteFrame(jobID: number, frame: number): void { const { meta } = frameDataCache[jobID]; meta.deleted_frames[frame] = true; } -export function restoreFrame(jobID, frame) { +export function restoreFrame(jobID: number, frame: number): void { const { meta } = frameDataCache[jobID]; if (frame in meta.deleted_frames) { delete meta.deleted_frames[frame]; } } -export async function patchMeta(jobID) { +export async function patchMeta(jobID: number): Promise { const { meta } = frameDataCache[jobID]; const newMeta = await serverProxy.frames.saveMeta('job', jobID, { deleted_frames: Object.keys(meta.deleted_frames), @@ -799,7 +543,9 @@ export async function patchMeta(jobID) { frameDataCache[jobID].meta.deleted_frames = 
prevDeletedFrames; } -export async function findFrame(jobID, frameFrom, frameTo, filters) { +export async function findFrame( + jobID: number, frameFrom: number, frameTo: number, filters: { offset?: number, notDeleted: boolean }, +): Promise { const offset = filters.offset || 1; let meta; if (!frameDataCache[jobID]) { @@ -836,23 +582,16 @@ export async function findFrame(jobID, frameFrom, frameTo, filters) { return lastUndeletedFrame; } -export function getRanges(jobID) { +export function getRanges(jobID): Array { if (!(jobID in frameDataCache)) { - return { - decoded: [], - buffered: [], - }; + return []; } - return { - decoded: frameDataCache[jobID].provider.cachedFrames, - buffered: frameDataCache[jobID].frameBuffer.cachedFrames(), - }; + return frameDataCache[jobID].provider.cachedFrames; } -export function clear(jobID) { +export function clear(jobID: number): void { if (jobID in frameDataCache) { - frameDataCache[jobID].frameBuffer.clear(); delete frameDataCache[jobID]; } } diff --git a/cvat-core/src/organization.ts b/cvat-core/src/organization.ts index 166fa790d58..fcc3379d215 100644 --- a/cvat-core/src/organization.ts +++ b/cvat-core/src/organization.ts @@ -3,31 +3,15 @@ // // SPDX-License-Identifier: MIT +import { SerializedOrganization, SerializedOrganizationContact } from './server-response-types'; import { checkObjectType, isEnum } from './common'; import config from './config'; import { MembershipRole } from './enums'; -import { ArgumentError, ServerError } from './exceptions'; +import { ArgumentError, DataError } from './exceptions'; import PluginRegistry from './plugins'; import serverProxy from './server-proxy'; import User from './user'; -interface RawOrganizationData { - id?: number, - slug?: string, - name?: string, - description?: string, - created_date?: string, - updated_date?: string, - owner?: any, - contact?: OrganizationContact, -} - -interface OrganizationContact { - email?: string; - location?: string; - phoneNumber?: string -} - 
interface Membership { user: User; is_active: boolean; @@ -45,12 +29,12 @@ export default class Organization { public readonly createdDate: string; public readonly updatedDate: string; public readonly owner: User; - public contact: OrganizationContact; + public contact: SerializedOrganizationContact; public name: string; public description: string; - constructor(initialData: RawOrganizationData) { - const data: RawOrganizationData = { + constructor(initialData: SerializedOrganization) { + const data: SerializedOrganization = { id: undefined, slug: undefined, name: undefined, @@ -354,7 +338,7 @@ Object.defineProperties(Organization.prototype.leave, { }); const [membership] = result.results; if (!membership) { - throw new ServerError( + throw new DataError( `Could not find membership for user ${user.username} in organization ${this.slug}`, ); } diff --git a/cvat-core/src/quality-settings.ts b/cvat-core/src/quality-settings.ts index 2ff4ab9001e..73f9245d131 100644 --- a/cvat-core/src/quality-settings.ts +++ b/cvat-core/src/quality-settings.ts @@ -2,26 +2,10 @@ // // SPDX-License-Identifier: MIT +import { SerializedQualitySettingsData } from './server-response-types'; import PluginRegistry from './plugins'; import serverProxy from './server-proxy'; -export interface SerializedQualitySettingsData { - id?: number; - task?: number; - iou_threshold?: number; - oks_sigma?: number; - line_thickness?: number; - low_overlap_threshold?: number; - compare_line_orientation?: boolean; - line_orientation_threshold?: number; - compare_groups?: boolean; - group_match_threshold?: number; - check_covered_annotations?: boolean; - object_visibility_threshold?: number; - panoptic_comparison?: boolean; - compare_attributes?: boolean; -} - export default class QualitySettings { #id: number; #task: number; diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index aaf5f39d99c..0fabf6ffb20 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ 
-7,14 +7,15 @@ import FormData from 'form-data'; import store from 'store'; import Axios, { AxiosError, AxiosResponse } from 'axios'; import * as tus from 'tus-js-client'; +import { ChunkQuality } from 'cvat-data'; + import { SerializedLabel, SerializedAnnotationFormats, ProjectsFilter, - SerializedProject, SerializedTask, TasksFilter, SerializedUser, - SerializedAbout, SerializedRemoteFile, SerializedUserAgreement, + SerializedProject, SerializedTask, TasksFilter, SerializedUser, SerializedOrganization, + SerializedAbout, SerializedRemoteFile, SerializedUserAgreement, FunctionsResponseBody, SerializedRegister, JobsFilter, SerializedJob, SerializedGuide, SerializedAsset, -} from 'server-response-types'; -import { SerializedQualityReportData } from 'quality-report'; -import { SerializedQualitySettingsData } from 'quality-settings'; +} from './server-response-types'; +import { SerializedQualityReportData } from './quality-report'; import { SerializedAnalyticsReport } from './analytics-report'; import { Storage } from './storage'; import { StorageLocation, WebhookSourceType } from './enums'; @@ -22,7 +23,6 @@ import { isEmail, isResourceURL } from './common'; import config from './config'; import DownloadWorker from './download.worker'; import { ServerError } from './exceptions'; -import { FunctionsResponseBody } from './server-response-types'; import { SerializedQualityConflictData } from './quality-conflict'; type Params = { @@ -1429,25 +1429,23 @@ function getPreview(instance: 'projects' | 'tasks' | 'jobs' | 'cloudstorages' | response = await Axios.get(url, { responseType: 'blob', }); + + return response.data; } catch (errorData) { const code = errorData.response ? 
errorData.response.status : errorData.code; + if (code === 404) { + return null; + } throw new ServerError(`Could not get preview for "${instance}/${id}"`, code); } - - if (response.status === 404) { - return null; - } - - return response.data; }; } async function getImageContext(jid: number, frame: number): Promise { const { backendAPI } = config; - let response = null; try { - response = await Axios.get(`${backendAPI}/jobs/${jid}/data`, { + const response = await Axios.get(`${backendAPI}/jobs/${jid}/data`, { params: { quality: 'original', type: 'context_image', @@ -1455,27 +1453,28 @@ async function getImageContext(jid: number, frame: number): Promise }, responseType: 'arraybuffer', }); + + return response.data; } catch (errorData) { throw generateError(errorData); } - - return response.data; } -async function getData(jid: number, chunk: number): Promise { +async function getData(jid: number, chunk: number, quality: ChunkQuality): Promise { const { backendAPI } = config; - let response = null; try { - response = await workerAxios.get(`${backendAPI}/jobs/${jid}/data`, { + const response = await workerAxios.get(`${backendAPI}/jobs/${jid}/data`, { params: { ...enableOrganization(), - quality: 'compressed', + quality, type: 'chunk', number: chunk, }, responseType: 'arraybuffer', }); + + return response; } catch (errorData) { throw generateError({ message: '', @@ -1485,8 +1484,6 @@ async function getData(jid: number, chunk: number): Promise { }, }); } - - return response; } export interface RawFramesMetaData { @@ -1958,7 +1955,7 @@ async function getOrganizations() { return response.results; } -async function createOrganization(data) { +async function createOrganization(data: SerializedOrganization): Promise { const { backendAPI } = config; let response = null; @@ -1973,7 +1970,9 @@ async function createOrganization(data) { return response.data; } -async function updateOrganization(id, data) { +async function updateOrganization( + id: number, data: Partial, +): 
Promise { const { backendAPI } = config; let response = null; @@ -1986,7 +1985,7 @@ async function updateOrganization(id, data) { return response.data; } -async function deleteOrganization(id) { +async function deleteOrganization(id: number): Promise { const { backendAPI } = config; try { @@ -2043,7 +2042,7 @@ async function updateOrganizationMembership(membershipId, data) { return response.data; } -async function deleteOrganizationMembership(membershipId) { +async function deleteOrganizationMembership(membershipId: number): Promise { const { backendAPI } = config; try { diff --git a/cvat-core/src/server-response-types.ts b/cvat-core/src/server-response-types.ts index 5b96ee21e8a..ef4cfadef61 100644 --- a/cvat-core/src/server-response-types.ts +++ b/cvat-core/src/server-response-types.ts @@ -198,3 +198,37 @@ export interface SerializedAsset { created_date: string; owner: SerializedUser; } + +export interface SerializedOrganizationContact { + email?: string; + location?: string; + phoneNumber?: string +} + +export interface SerializedOrganization { + id?: number, + slug?: string, + name?: string, + description?: string, + created_date?: string, + updated_date?: string, + owner?: any, + contact?: SerializedOrganizationContact, +} + +export interface SerializedQualitySettingsData { + id?: number; + task?: number; + iou_threshold?: number; + oks_sigma?: number; + line_thickness?: number; + low_overlap_threshold?: number; + compare_line_orientation?: boolean; + line_orientation_threshold?: number; + compare_groups?: boolean; + group_match_threshold?: number; + check_covered_annotations?: boolean; + object_visibility_threshold?: number; + panoptic_comparison?: boolean; + compare_attributes?: boolean; +} diff --git a/cvat-core/src/session-implementation.ts b/cvat-core/src/session-implementation.ts index 036f3c74be4..ea57c1881f1 100644 --- a/cvat-core/src/session-implementation.ts +++ b/cvat-core/src/session-implementation.ts @@ -129,6 +129,7 @@ export function 
implementJob(Job) { isPlaying, step, this.dimension, + (chunkNumber, quality) => this.frames.chunk(chunkNumber, quality), ); return frameData; }; @@ -179,6 +180,11 @@ export function implementJob(Job) { return result; }; + Job.prototype.frames.chunk.implementation = async function (chunkNumber, quality) { + const result = await serverProxy.frames.getData(this.id, chunkNumber, quality); + return result; + }; + Job.prototype.frames.search.implementation = async function (filters, frameFrom, frameTo) { if (typeof filters !== 'object') { throw new ArgumentError('Filters should be an object'); @@ -656,6 +662,14 @@ export function implementTask(Task) { return null; }; + Task.prototype.frames.contextImage.implementation = async function () { + throw new Error('Not implemented'); + }; + + Task.prototype.frames.chunk.implementation = async function () { + throw new Error('Not implemented'); + }; + // TODO: Check filter for annotations Task.prototype.annotations.get.implementation = async function (frame, allTracks, filters) { if (!Array.isArray(filters) || filters.some((filter) => typeof filter !== 'string')) { diff --git a/cvat-core/src/session.ts b/cvat-core/src/session.ts index 96e355e64d7..4f34ed458cc 100644 --- a/cvat-core/src/session.ts +++ b/cvat-core/src/session.ts @@ -236,6 +236,15 @@ function buildDuplicatedAPI(prototype) { ); return result; }, + async chunk(chunkNumber, quality) { + const result = await PluginRegistry.apiWrapper.call( + this, + prototype.frames.chunk, + chunkNumber, + quality, + ); + return result; + }, }, writable: true, }), @@ -364,6 +373,7 @@ export class Job extends Session { preview: CallableFunction; contextImage: CallableFunction; search: CallableFunction; + chunk: CallableFunction; }; public logger: { @@ -567,6 +577,7 @@ export class Job extends Session { preview: Object.getPrototypeOf(this).frames.preview.bind(this), search: Object.getPrototypeOf(this).frames.search.bind(this), contextImage: 
Object.getPrototypeOf(this).frames.contextImage.bind(this), + chunk: Object.getPrototypeOf(this).frames.chunk.bind(this), }; this.logger = { @@ -677,6 +688,7 @@ export class Task extends Session { preview: CallableFunction; contextImage: CallableFunction; search: CallableFunction; + chunk: CallableFunction; }; public logger: { @@ -1093,6 +1105,7 @@ export class Task extends Session { preview: Object.getPrototypeOf(this).frames.preview.bind(this), contextImage: Object.getPrototypeOf(this).frames.contextImage.bind(this), search: Object.getPrototypeOf(this).frames.search.bind(this), + chunk: Object.getPrototypeOf(this).frames.chunk.bind(this), }; this.logger = { diff --git a/cvat-core/tests/api/frames.js b/cvat-core/tests/api/frames.js index 8f9299ab289..0ee20eb9fda 100644 --- a/cvat-core/tests/api/frames.js +++ b/cvat-core/tests/api/frames.js @@ -88,22 +88,6 @@ describe('Feature: delete/restore frame', () => { }); }); -describe('Feature: get frame data', () => { - test('get frame data for a task', async () => { - const task = (await window.cvat.tasks.get({ id: 100 }))[0]; - const frame = await task.frames.get(0); - const frameData = await frame.data(); - expect(typeof frameData).toBe('string'); - }); - - test('get frame data for a job', async () => { - const job = (await window.cvat.jobs.get({ jobID: 100 }))[0]; - const frame = await job.frames.get(0); - const frameData = await frame.data(); - expect(typeof frameData).toBe('string'); - }); -}); - describe('Feature: get frame preview', () => { test('get frame preview for a task', async () => { const task = (await window.cvat.tasks.get({ id: 100 }))[0]; diff --git a/cvat-core/tests/mocks/server-proxy.mock.js b/cvat-core/tests/mocks/server-proxy.mock.js index a42889eac71..06020c20a84 100644 --- a/cvat-core/tests/mocks/server-proxy.mock.js +++ b/cvat-core/tests/mocks/server-proxy.mock.js @@ -380,12 +380,29 @@ class ServerProxy { return 'DUMMY_IMAGE'; } - async function getMeta(session, jid) { + async function 
getMeta(session, id) { if (session !== 'job') { - throw new Error('not implemented test'); + const task = tasksDummyData.results.find((task) => task.id === id); + const jobs = jobsDummyData.results.filter((job) => job.task_id === id); + const jobsMeta = jobs.map((job) => frameMetaDummyData[job.id]).flat(); + let framesMeta = jobsMeta.map((jobMeta) => jobMeta.frames); + if (task.mode === 'interpolation') { + framesMeta = [framesMeta[0]]; + } + + return { + chunk_size: jobsMeta[0].chunk_size , + size: task.size, + image_quality: task.image_quality, + start_frame: task.start_frame, + stop_frame: task.stop_frame, + frames: framesMeta, + deleted_frames: [], + included_frames: [], + }; } - return JSON.parse(JSON.stringify(frameMetaDummyData[jid])); + return JSON.parse(JSON.stringify(frameMetaDummyData[id])); } async function saveMeta(session, jid, meta) { diff --git a/cvat-data/package.json b/cvat-data/package.json index 63c1be63d62..375a1f23cee 100644 --- a/cvat-data/package.json +++ b/cvat-data/package.json @@ -1,6 +1,6 @@ { "name": "cvat-data", - "version": "1.1.0", + "version": "2.0.0", "description": "", "main": "src/ts/cvat-data.ts", "scripts": { diff --git a/cvat-data/src/ts/cvat-data.ts b/cvat-data/src/ts/cvat-data.ts index 78d0bcf558b..00768a04351 100644 --- a/cvat-data/src/ts/cvat-data.ts +++ b/cvat-data/src/ts/cvat-data.ts @@ -8,35 +8,43 @@ import { MP4Reader, Bytestream } from './3rdparty/mp4'; import ZipDecoder from './unzip_imgs.worker'; import H264Decoder from './3rdparty/Decoder.worker'; +export class RequestOutdatedError extends Error {} + export enum BlockType { MP4VIDEO = 'mp4video', ARCHIVE = 'archive', } +export enum ChunkQuality { + ORIGINAL = 'original', + COMPRESSED = 'compressed', +} + export enum DimensionType { DIMENSION_3D = '3d', DIMENSION_2D = '2d', } -export function decodeZip( - block: any, start: number, end: number, dimension: any, +export function decodeContextImages( + block: any, start: number, end: number, ): Promise> { + const 
decodeZipWorker = ((decodeContextImages as any).zipWorker || new (ZipDecoder as any)()) as Worker; + (decodeContextImages as any).zipWorker = decodeZipWorker; return new Promise((resolve, reject) => { - decodeZip.mutex.acquire().then((release) => { - const worker = new ZipDecoder(); + decodeContextImages.mutex.acquire().then((release) => { const result: Record = {}; let decoded = 0; - worker.onerror = (e: ErrorEvent) => { + decodeZipWorker.onerror = (e: ErrorEvent) => { release(); - worker.terminate(); reject(new Error(`Archive can not be decoded. ${e.message}`)); }; - worker.onmessage = async (event) => { + decodeZipWorker.onmessage = async (event) => { const { error, fileName } = event.data; if (error) { - worker.onerror(new ErrorEvent('error', { message: error.toString() })); + decodeZipWorker.onerror(new ErrorEvent('error', { message: error.toString() })); + return; } const { data } = event.data; @@ -45,174 +53,136 @@ export function decodeZip( if (decoded === end) { release(); - worker.terminate(); resolve(result); } }; - worker.postMessage({ + decodeZipWorker.postMessage({ block, start, end, - dimension, + dimension: DimensionType.DIMENSION_2D, dimension2D: DimensionType.DIMENSION_2D, }); }); }); } -decodeZip.mutex = new Mutex(); +decodeContextImages.mutex = new Mutex(); interface BlockToDecode { start: number; end: number; block: ArrayBuffer; - resolveCallback: (frame: number) => void; - rejectCallback: (e: ErrorEvent) => void; + onDecodeAll(): void; + onDecode(frame: number, bitmap: ImageBitmap | Blob): void; + onReject(e: Error): void; } -export class FrameProvider { - private blocksRanges: string[]; - private blockSize: number; +export class FrameDecoder { private blockType: BlockType; - + private chunkSize: number; /* ImageBitmap when decode zip or video chunks Blob when 3D dimension null when not decoded yet */ - private frames: Record; - private requestedBlockToDecode: null | BlockToDecode; - private blocksAreBeingDecoded: Record; - private 
promisedFrames: Record void; - reject: () => void; - }>; - private currentDecodingThreads: number; - private currentFrame: number; + private decodedChunks: Record>; + private chunkIsBeingDecoded: BlockToDecode | null; + private requestedChunkToDecode: BlockToDecode | null; + private orderedStack: number[]; private mutex: Mutex; - private dimension: DimensionType; - private workerThreadsLimit: number; - private cachedEncodedBlocksLimit: number; - private cachedDecodedBlocksLimit: number; - + private cachedChunksLimit: number; // used for video chunks to get correct side after decoding private renderWidth: number; private renderHeight: number; + private zipWorker: Worker; constructor( blockType: BlockType, - blockSize: number, + chunkSize: number, cachedBlockCount: number, - decodedBlocksCacheSize = 5, - maxWorkerThreadCount = 2, dimension: DimensionType = DimensionType.DIMENSION_2D, ) { this.mutex = new Mutex(); - this.blocksRanges = []; - this.frames = {}; - this.promisedFrames = {}; - this.currentDecodingThreads = 0; - this.currentFrame = -1; - - this.cachedEncodedBlocksLimit = Math.max(1, cachedBlockCount); // number of stored blocks - this.cachedDecodedBlocksLimit = decodedBlocksCacheSize; - this.workerThreadsLimit = maxWorkerThreadCount; + this.orderedStack = []; + + this.cachedChunksLimit = Math.max(1, cachedBlockCount); this.dimension = dimension; this.renderWidth = 1920; this.renderHeight = 1080; - this.blockSize = blockSize; + this.chunkSize = chunkSize; this.blockType = blockType; - // todo: sort out with logic of blocks - this._blocks = {}; - this.requestedBlockToDecode = null; - this.blocksAreBeingDecoded = {}; - - setTimeout(this._checkDecodeRequests.bind(this), 100); + this.decodedChunks = {}; + this.requestedChunkToDecode = null; + this.chunkIsBeingDecoded = null; } - _checkDecodeRequests(): void { - if (this.requestedBlockToDecode !== null && this.currentDecodingThreads < this.workerThreadsLimit) { - this.startDecode().then(() => { - 
setTimeout(this._checkDecodeRequests.bind(this), 100); - }); - } else { - setTimeout(this._checkDecodeRequests.bind(this), 100); - } + isChunkCached(chunkNumber: number): boolean { + return chunkNumber in this.decodedChunks; } - isChunkCached(start: number, end: number): boolean { - // todo: always returns false because this.blocksRanges is Array, not dictionary - // but if try to correct other errors happens, need to debug.. - return `${start}:${end}` in this.blocksRanges; + hasFreeSpace(): boolean { + return Object.keys(this.decodedChunks).length < this.cachedChunksLimit; } - /* This method removes extra data from a cache when memory overflow */ - async _cleanup(): Promise { - if (this.blocksRanges.length > this.cachedEncodedBlocksLimit) { - const shifted = this.blocksRanges.shift(); // get the oldest block - const [start, end] = shifted.split(':').map((el) => +el); - delete this._blocks[Math.floor(start / this.blockSize)]; - for (let i = start; i <= end; i++) { - delete this.frames[i]; - } - } - - // delete frames whose are not in areas of current frame - const distance = Math.floor(this.cachedDecodedBlocksLimit / 2); - for (let i = 0; i < this.blocksRanges.length; i++) { - const [start, end] = this.blocksRanges[i].split(':').map((el) => +el); - if ( - end < this.currentFrame - distance * this.blockSize || - start > this.currentFrame + distance * this.blockSize - ) { - for (let j = start; j <= end; j++) { - delete this.frames[j]; - } + cleanup(extra = 1): void { + // argument allows us to specify how many chunks we want to write after clear + const chunks = Object.keys(this.decodedChunks).map((chunk: string) => +chunk); + let { length } = chunks; + while (length > this.cachedChunksLimit - Math.min(extra, this.cachedChunksLimit)) { + const lastChunk = this.orderedStack.pop(); + if (typeof lastChunk === 'undefined') { + return; } + delete this.decodedChunks[lastChunk]; + length--; } } - async requestDecodeBlock( + requestDecodeBlock( block: ArrayBuffer, start: 
number, end: number, - resolveCallback: () => void, - rejectCallback: () => void, - ): Promise { - const release = await this.mutex.acquire(); - try { - if (this.requestedBlockToDecode !== null) { - if (start === this.requestedBlockToDecode.start && end === this.requestedBlockToDecode.end) { - // only rewrite callbacks if the same block was requested again - this.requestedBlockToDecode.resolveCallback = resolveCallback; - this.requestedBlockToDecode.rejectCallback = rejectCallback; - - // todo: should we reject the previous request here? - } else if (this.requestedBlockToDecode.rejectCallback) { - // if another block requested, the previous request should be rejected - this.requestedBlockToDecode.rejectCallback(); - } + onDecode: (frame: number, bitmap: ImageBitmap | Blob) => void, + onDecodeAll: () => void, + onReject: (e: Error) => void, + ): void { + if (this.requestedChunkToDecode !== null) { + // a chunk was already requested to be decoded, but decoding didn't start yet + if (start === this.requestedChunkToDecode.start && end === this.requestedChunkToDecode.end) { + // it was the same chunk + this.requestedChunkToDecode.onReject(new RequestOutdatedError()); + + this.requestedChunkToDecode.onDecode = onDecode; + this.requestedChunkToDecode.onReject = onReject; + } else if (this.requestedChunkToDecode.onReject) { + // it was other chunk + this.requestedChunkToDecode.onReject(new RequestOutdatedError()); } + } else if (this.chunkIsBeingDecoded === null || this.chunkIsBeingDecoded.start !== start) { + // everything was decoded or decoding other chunk is in process + this.requestedChunkToDecode = { + block, + start, + end, + onDecode, + onDecodeAll, + onReject, + }; + } else { + // the same chunk is being decoded right now + // reject previous decoding request + this.chunkIsBeingDecoded.onReject(new RequestOutdatedError()); - if (!(`${start}:${end}` in this.blocksAreBeingDecoded)) { - this.requestedBlockToDecode = { - block: block || this._blocks[Math.floor(start / 
this.blockSize)], - start, - end, - resolveCallback, - rejectCallback, - }; - } else { - this.blocksAreBeingDecoded[`${start}:${end}`].rejectCallback = rejectCallback; - this.blocksAreBeingDecoded[`${start}:${end}`].resolveCallback = resolveCallback; - } - } finally { - release(); + this.chunkIsBeingDecoded.onReject = onReject; + this.chunkIsBeingDecoded.onDecode = onDecode; } + + this.startDecode(); } setRenderSize(width: number, height: number): void { @@ -220,50 +190,37 @@ export class FrameProvider { this.renderHeight = height; } - /* Method returns frame from collection. Else method returns null */ - async frame(frameNumber: number): Promise { - this.currentFrame = frameNumber; - return new Promise((resolve, reject) => { - if (frameNumber in this.frames) { - if (this.frames[frameNumber] !== null) { - resolve(this.frames[frameNumber]); - } else { - this.promisedFrames[frameNumber] = { resolve, reject }; - } - } else { - resolve(null); - } - }); - } - - isNextChunkExists(frameNumber: number): boolean { - const nextChunkNum = Math.floor(frameNumber / this.blockSize) + 1; - return nextChunkNum in this._blocks; - } + frame(frameNumber: number): ImageBitmap | Blob | null { + const chunkNumber = Math.floor(frameNumber / this.chunkSize); + if (chunkNumber in this.decodedChunks) { + return this.decodedChunks[chunkNumber][frameNumber]; + } - setReadyToLoading(chunkNumber: number): void { - this._blocks[chunkNumber] = 'loading'; + return null; } async startDecode(): Promise { + const blockToDecode = { ...this.requestedChunkToDecode }; const release = await this.mutex.acquire(); try { - const { start, end, block } = this.requestedBlockToDecode; - - this.blocksRanges.push(`${start}:${end}`); - this.blocksAreBeingDecoded[`${start}:${end}`] = this.requestedBlockToDecode; - this.requestedBlockToDecode = null; - this._blocks[Math.floor((start + 1) / this.blockSize)] = block; - - for (let i = start; i <= end; i++) { - this.frames[i] = null; + const { start, end, block } = 
this.requestedChunkToDecode; + if (start !== blockToDecode.start) { + // request is not relevant, another block was already requested + // it happens when A is being decoded, B comes and wait for mutex, C comes and wait for mutex + // B is not necessary anymore, because C already was requested + blockToDecode.onReject(new RequestOutdatedError()); + throw new RequestOutdatedError(); } - this._cleanup(); - this.currentDecodingThreads++; + const chunkNumber = Math.floor(start / this.chunkSize); + this.orderedStack = [chunkNumber, ...this.orderedStack]; + this.cleanup(); + const decodedFrames: Record = {}; + this.chunkIsBeingDecoded = this.requestedChunkToDecode; + this.requestedChunkToDecode = null; if (this.blockType === BlockType.MP4VIDEO) { - const worker = new H264Decoder(); + const worker = new H264Decoder() as any as Worker; let index = start; worker.onmessage = (e) => { @@ -281,46 +238,26 @@ export class FrameProvider { const array = new Uint8ClampedArray(e.data.buf.slice(0, width * height * 4)); createImageBitmap(new ImageData(array, width)).then((bitmap) => { - this.frames[keptIndex] = bitmap; - const { resolveCallback } = this.blocksAreBeingDecoded[`${start}:${end}`]; - if (resolveCallback) { - resolveCallback(keptIndex); - } - - if (keptIndex in this.promisedFrames) { - const { resolve } = this.promisedFrames[keptIndex]; - delete this.promisedFrames[keptIndex]; - resolve(this.frames[keptIndex]); - } + decodedFrames[keptIndex] = bitmap; + this.chunkIsBeingDecoded.onDecode(keptIndex, decodedFrames[keptIndex]); if (keptIndex === end) { + this.decodedChunks[chunkNumber] = decodedFrames; + this.chunkIsBeingDecoded.onDecodeAll(); + this.chunkIsBeingDecoded = null; worker.terminate(); - this.currentDecodingThreads--; - delete this.blocksAreBeingDecoded[`${start}:${end}`]; + release(); } }); index++; }; - worker.onerror = (e: ErrorEvent) => { + worker.onerror = () => { + release(); worker.terminate(); - this.currentDecodingThreads--; - - for (let i = index; i <= 
end; i++) { - // reject all the following frames - if (i in this.promisedFrames) { - const { reject } = this.promisedFrames[i]; - delete this.promisedFrames[i]; - reject(); - } - } - - if (this.blocksAreBeingDecoded[`${start}:${end}`].rejectCallback) { - this.blocksAreBeingDecoded[`${start}:${end}`].rejectCallback(e); - } - - delete this.blocksAreBeingDecoded[`${start}:${end}`]; + this.chunkIsBeingDecoded.onReject(new Error('Error occured during decode')); + this.chunkIsBeingDecoded = null; }; worker.postMessage({ @@ -339,58 +276,44 @@ export class FrameProvider { const sps = avc.sps[0]; const pps = avc.pps[0]; - /* Decode Sequence & Picture Parameter Sets */ worker.postMessage({ buf: sps, offset: 0, length: sps.length }); worker.postMessage({ buf: pps, offset: 0, length: pps.length }); - /* Decode Pictures */ for (let sample = 0; sample < video.getSampleCount(); sample++) { video.getSampleNALUnits(sample).forEach((nal) => { worker.postMessage({ buf: nal, offset: 0, length: nal.length }); }); } } else { - const worker = new ZipDecoder(); + this.zipWorker = this.zipWorker || new (ZipDecoder as any)() as any as Worker; let index = start; - worker.onmessage = async (event) => { - this.frames[event.data.index] = event.data.data; - - const { resolveCallback } = this.blocksAreBeingDecoded[`${start}:${end}`]; - if (resolveCallback) { - resolveCallback(event.data.index); + this.zipWorker.onmessage = async (event) => { + if (event.data.error) { + this.zipWorker.onerror(new ErrorEvent('error', { message: event.data.error.toString() })); + return; } - if (event.data.index in this.promisedFrames) { - const { resolve } = this.promisedFrames[event.data.index]; - delete this.promisedFrames[event.data.index]; - resolve(this.frames[event.data.index]); - } + decodedFrames[event.data.index] = event.data.data as ImageBitmap | Blob; + this.chunkIsBeingDecoded.onDecode(event.data.index, decodedFrames[event.data.index]); if (index === end) { - worker.terminate(); - 
this.currentDecodingThreads--; - delete this.blocksAreBeingDecoded[`${start}:${end}`]; + this.decodedChunks[chunkNumber] = decodedFrames; + this.chunkIsBeingDecoded.onDecodeAll(); + this.chunkIsBeingDecoded = null; + release(); } index++; }; - worker.onerror = (e: ErrorEvent) => { - for (let i = start; i <= end; i++) { - if (i in this.promisedFrames) { - const { reject } = this.promisedFrames[i]; - delete this.promisedFrames[i]; - reject(); - } - } - if (this.blocksAreBeingDecoded[`${start}:${end}`].rejectCallback) { - this.blocksAreBeingDecoded[`${start}:${end}`].rejectCallback(e); - } - this.currentDecodingThreads--; - worker.terminate(); + this.zipWorker.onerror = () => { + release(); + + this.chunkIsBeingDecoded.onReject(new Error('Error occured during decode')); + this.chunkIsBeingDecoded = null; }; - worker.postMessage({ + this.zipWorker.postMessage({ block, start, end, @@ -398,20 +321,23 @@ export class FrameProvider { dimension2D: DimensionType.DIMENSION_2D, }); } - } finally { + } catch (error) { + this.chunkIsBeingDecoded = null; release(); } } - get decodedBlocksCacheSize(): number { - return this.cachedDecodedBlocksLimit; + get cachedChunks(): number[] { + return Object.keys(this.decodedChunks).map((chunkNumber: string) => +chunkNumber).sort((a, b) => a - b); } - /* - Method returns a list of cached ranges - Is an array of strings like "start:end" - */ get cachedFrames(): string[] { - return [...this.blocksRanges].sort((a, b) => +a.split(':')[0] - +b.split(':')[0]); + const chunks = Object.keys(this.decodedChunks).map((chunkNumber: string) => +chunkNumber).sort((a, b) => a - b); + return chunks.map((chunk) => { + const frames = Object.keys(this.decodedChunks[chunk]).map((frame) => +frame); + const min = Math.min(...frames); + const max = Math.max(...frames); + return `${min}:${max}`; + }); } } diff --git a/cvat-ui/package.json b/cvat-ui/package.json index 5e9569c0502..46e6739ff9c 100644 --- a/cvat-ui/package.json +++ b/cvat-ui/package.json @@ -1,6 +1,6 
@@ { "name": "cvat-ui", - "version": "1.54.1", + "version": "1.54.2", "description": "CVAT single-page application", "main": "src/index.tsx", "scripts": { diff --git a/cvat-ui/plugins/sam_plugin/src/ts/index.tsx b/cvat-ui/plugins/sam/src/ts/index.tsx similarity index 97% rename from cvat-ui/plugins/sam_plugin/src/ts/index.tsx rename to cvat-ui/plugins/sam/src/ts/index.tsx index fe030c03565..e4f358ab77e 100644 --- a/cvat-ui/plugins/sam_plugin/src/ts/index.tsx +++ b/cvat-ui/plugins/sam/src/ts/index.tsx @@ -132,7 +132,7 @@ function modelData( const samPlugin: SAMPlugin = { name: 'Segment Anything', - description: 'Plugin handles non-default SAM serverless function output', + description: 'Handles non-default SAM serverless function output', cvat: { jobs: { get: { @@ -287,7 +287,7 @@ const samPlugin: SAMPlugin = { }, }; -const SAMModelPlugin: ComponentBuilder = ({ core }) => { +const builder: ComponentBuilder = ({ core }) => { samPlugin.data.core = core; core.plugins.register(samPlugin); InferenceSession.create(samPlugin.data.modelURL).then((session) => { @@ -295,7 +295,7 @@ const SAMModelPlugin: ComponentBuilder = ({ core }) => { }); return { - name: 'Segment Anything model', + name: samPlugin.name, destructor: () => {}, }; }; @@ -303,7 +303,7 @@ const SAMModelPlugin: ComponentBuilder = ({ core }) => { function register(): void { if (Object.prototype.hasOwnProperty.call(window, 'cvatUI')) { (window as any as { cvatUI: { registerComponent: PluginEntryPoint } }) - .cvatUI.registerComponent(SAMModelPlugin); + .cvatUI.registerComponent(builder); } } diff --git a/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx b/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx index 5a97f23cf74..ef1ce7e6a4f 100644 --- a/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx +++ 
b/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx @@ -397,8 +397,20 @@ class OpenCVControlComponent extends React.PureComponent activeImageModifier .modifier.processImage(oldImageData, frame), imageData); const imageBitmap = await createImageBitmap(newImageData); - frameData.imageData = imageBitmap; - canvasInstance.setup(frameData, states, curZOrder); + const proxy = new Proxy(frameData, { + get: (_frameData, prop, receiver) => { + if (prop === 'data') { + return async () => ({ + renderWidth: imageData.width, + renderHeight: imageData.height, + imageData: imageBitmap, + }); + } + + return Reflect.get(_frameData, prop, receiver); + }, + }); + canvasInstance.setup(proxy, states, curZOrder); } } catch (error: any) { notification.error({ diff --git a/cvat-ui/src/components/header/settings-modal/player-settings.tsx b/cvat-ui/src/components/header/settings-modal/player-settings.tsx index 324243f3afb..ab8dbab42c0 100644 --- a/cvat-ui/src/components/header/settings-modal/player-settings.tsx +++ b/cvat-ui/src/components/header/settings-modal/player-settings.tsx @@ -1,4 +1,5 @@ // Copyright (C) 2020-2022 Intel Corporation +// Copyright (C) 2023 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -18,6 +19,7 @@ import { clamp } from 'utils/math'; import { BackJumpIcon, ForwardJumpIcon } from 'icons'; import { FrameSpeed } from 'reducers'; import config from 'config'; +import { usePlugins } from 'utils/hooks'; interface Props { frameStep: number; @@ -54,177 +56,201 @@ export default function PlayerSettingsComponent(props: Props): JSX.Element { onSwitchShowingDeletedFrames, } = props; + const plugins = usePlugins((state) => state.plugins.components.settings.player, props); + const minFrameStep = 2; const maxFrameStep = 1000; - return ( -

- - - Player step - { - if (typeof value !== 'undefined' && value !== null) { - onChangeFrameStep(Math.floor(clamp(+value, minFrameStep, maxFrameStep))); - } - }} - /> - - - - Number of frames skipped when selecting - - or - - - - - - - Player speed - { + onChangeFrameSpeed(speed); + }} + > + - - Fastest - - - Fast - - - Usual - - - Slow - - - Slower - - - Slowest - - - - - - - onChangeCanvasBackgroundColor(e.hex)} - /> - )} - overlayClassName='canvas-background-color-picker-popover' - trigger='click' + Fastest + + + Fast + + + Usual + + + Slow + + + Slower + + + Slowest + + + + + ), 10]); + + items.push([( + + + onChangeCanvasBackgroundColor(e.hex)} + /> + )} + overlayClassName='canvas-background-color-picker-popover' + trigger='click' + > + - - - - - - - - { - onSwitchResetZoom(event.target.checked); - }} - > - Reset zoom - - - - Fit image after changing frame - - - - - - - { - onSwitchRotateAll(event.target.checked); - }} - > - Rotate all images - - - - Rotate all images simultaneously - - - - - - - - - { - onSwitchSmoothImage(event.target.checked); - }} - > - Smooth image - - - - Smooth image when zoom-in it - - - - - + Rotate all images + + + + Rotate all images simultaneously + + + + + ), 30]); + + items.push([( + + + + { - onSwitchShowingDeletedFrames(event.target.checked); + onSwitchSmoothImage(event.target.checked); }} > - Show deleted frames + Smooth image - - - You will be able to navigate and restore deleted frames - - - + + + Smooth image when zoom-in it + + + + + + { + onSwitchShowingDeletedFrames(event.target.checked); + }} + > + Show deleted frames + + + + You will be able to navigate and restore deleted frames + + + + ), 40]); + + items.push(...plugins.map(({ component: Component, weight }, index: number) => ( + [, weight] as [JSX.Element, number] + ))); + + return ( +
+ { items.sort((item1, item2) => item1[1] - item2[1]) + .map((item) => item[0]) }
); } diff --git a/cvat-ui/src/components/header/settings-modal/styles.scss b/cvat-ui/src/components/header/settings-modal/styles.scss index 8f4ce00edab..4d6c496b642 100644 --- a/cvat-ui/src/components/header/settings-modal/styles.scss +++ b/cvat-ui/src/components/header/settings-modal/styles.scss @@ -2,7 +2,7 @@ // // SPDX-License-Identifier: MIT -@import '../../../base.scss'; +@import '../../../base'; .cvat-settings-tabs { height: 100%; @@ -24,24 +24,6 @@ padding: 24px; } -.cvat-workspace-settings-auto-save, -.cvat-workspace-settings-autoborders, -.cvat-workspace-settings-intelligent-polygon-cropping, -.cvat-workspace-settings-show-text-always, -.cvat-workspace-settings-show-interpolated, -.cvat-workspace-settings-show-deleted, -.cvat-workspace-settings-approx-poly-threshold, -.cvat-workspace-settings-aam-zoom-margin, -.cvat-workspace-settings-show-frame-tags, -.cvat-workspace-settings-text-settings, -.cvat-workspace-settings-control-points-size { - margin-bottom: $grid-unit-size * 3; - - > div:first-child { - margin-bottom: $grid-unit-size; - } -} - .cvat-workspace-settings-text-content { width: 100%; } @@ -50,13 +32,7 @@ user-select: none; } -.cvat-player-settings-step, -.cvat-player-settings-speed, -.cvat-player-settings-reset-zoom, -.cvat-player-settings-rotate-all, -.cvat-player-settings-canvas-background, -.cvat-workspace-settings-aam-zoom-margin, -.cvat-workspace-settings-auto-save-interval { +.cvat-player-setting { margin-bottom: $grid-unit-size * 3; } diff --git a/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx b/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx index a55d6ae8ec7..e624e7c4adf 100644 --- a/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx +++ b/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx @@ -85,8 +85,8 @@ function WorkspaceSettingsComponent(props: Props): JSX.Element { return (
- - + + - - Auto save every minutes - + - + - + Content of a text @@ -172,7 +171,7 @@ function WorkspaceSettingsComponent(props: Props): JSX.Element { - + Position of a text @@ -199,7 +198,7 @@ function WorkspaceSettingsComponent(props: Props): JSX.Element { /> - + - + Try to crop polygons automatically when editing - + Show frame tags in the corner of the workspace - + Attribute annotation mode (AAM) zoom margin - + Control points size - + Default number of points in polygon approximation @@ -296,7 +295,7 @@ function WorkspaceSettingsComponent(props: Props): JSX.Element { marks={marks} /> - + Works for serverless interactors and OpenCV scissors diff --git a/cvat-ui/src/reducers/index.ts b/cvat-ui/src/reducers/index.ts index af2c8d9ab1d..f29b7dff8f9 100644 --- a/cvat-ui/src/reducers/index.ts +++ b/cvat-ui/src/reducers/index.ts @@ -312,6 +312,9 @@ export interface PluginsState { player: PluginComponent[]; }; }; + settings: { + player: PluginComponent[], + } router: PluginComponent[]; loggedInModals: PluginComponent[]; } diff --git a/cvat-ui/src/reducers/plugins-reducer.ts b/cvat-ui/src/reducers/plugins-reducer.ts index 49ff3f77cb8..aaedad46dcb 100644 --- a/cvat-ui/src/reducers/plugins-reducer.ts +++ b/cvat-ui/src/reducers/plugins-reducer.ts @@ -44,6 +44,9 @@ const defaultState: PluginsState = { }, router: [], loggedInModals: [], + settings: { + player: [], + }, }, }; diff --git a/cvat-ui/webpack.config.js b/cvat-ui/webpack.config.js index e44e20cc57f..467233f9790 100644 --- a/cvat-ui/webpack.config.js +++ b/cvat-ui/webpack.config.js @@ -15,7 +15,7 @@ const CopyPlugin = require('copy-webpack-plugin'); module.exports = (env) => { const defaultAppConfig = path.join(__dirname, 'src/config.tsx'); - const defaultPlugins = ['plugins/sam_plugin']; + const defaultPlugins = ['plugins/sam']; const appConfigFile = process.env.UI_APP_CONFIG ? process.env.UI_APP_CONFIG : defaultAppConfig; const pluginsList = process.env.CLIENT_PLUGINS ? 
[...defaultPlugins, ...process.env.CLIENT_PLUGINS.split(':')] .map((s) => s.trim()).filter((s) => !!s) : defaultPlugins; diff --git a/yarn.lock b/yarn.lock index b48ea52b318..0c1047660b8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3356,11 +3356,6 @@ braces@^3.0.2, braces@~3.0.2: dependencies: fill-range "^7.0.1" -browser-or-node@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/browser-or-node/-/browser-or-node-2.1.1.tgz#738790b3a86a8fc020193fa581273fbe65eaea0f" - integrity sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg== - browser-process-hrtime@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" @@ -4155,7 +4150,7 @@ custom-error-instance@2.1.1: three "^0.126.1" "cvat-canvas@link:./cvat-canvas": - version "2.17.1" + version "2.17.3" dependencies: "@types/fabric" "^4.5.7" "@types/polylabel" "^1.0.5" @@ -4168,11 +4163,10 @@ custom-error-instance@2.1.1: svg.select.js "3.0.1" "cvat-core@link:./cvat-core": - version "9.3.0" + version "10.0.1" dependencies: "@types/lodash" "^4.14.191" axios "^0.27.2" - browser-or-node "^2.0.0" cvat-data "link:./cvat-data" detect-browser "^5.2.1" error-stack-parser "^2.0.2" @@ -4186,7 +4180,7 @@ custom-error-instance@2.1.1: tus-js-client "^3.0.1" "cvat-data@link:./cvat-data": - version "1.1.0" + version "2.0.0" dependencies: async-mutex "^0.4.0" jszip "3.10.1" From 70fc428311365b626962f92a03101288eb75ceb3 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Wed, 9 Aug 2023 15:23:59 +0300 Subject: [PATCH 19/32] Remove the YOLOv5 serverless function (#6618) Due to YOLOv5's recent license change to the AGPL () it is no longer acceptable to use it in the project as a matter of policy. 
--- CHANGELOG.md | 3 +- README.md | 1 - .../yolov5/nuclio/function-gpu.yaml | 127 ------------------ .../ultralytics/yolov5/nuclio/function.yaml | 123 ----------------- .../pytorch/ultralytics/yolov5/nuclio/main.py | 40 ------ .../en/docs/manual/advanced/ai-tools.md | 1 - .../manual/advanced/automatic-annotation.md | 1 - 7 files changed, 2 insertions(+), 294 deletions(-) delete mode 100644 serverless/pytorch/ultralytics/yolov5/nuclio/function-gpu.yaml delete mode 100644 serverless/pytorch/ultralytics/yolov5/nuclio/function.yaml delete mode 100644 serverless/pytorch/ultralytics/yolov5/nuclio/main.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 872136a260c..f34bfcef5fa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Removed -- TDB +- The YOLOv5 serverless function + () ### Fixed diff --git a/README.md b/README.md index 3192c62a898..b6d28199bde 100644 --- a/README.md +++ b/README.md @@ -193,7 +193,6 @@ up to 10x. 
Here is a list of the algorithms we support, and the platforms they c | [Object reidentification](/serverless/openvino/omz/intel/person-reidentification-retail-0277/nuclio) | reid | OpenVINO | ✔️ | | | [Semantic segmentation for ADAS](/serverless/openvino/omz/intel/semantic-segmentation-adas-0001/nuclio) | detector | OpenVINO | ✔️ | | | [Text detection v4](/serverless/openvino/omz/intel/text-detection-0004/nuclio) | detector | OpenVINO | ✔️ | | -| [YOLO v5](/serverless/pytorch/ultralytics/yolov5/nuclio) | detector | PyTorch | ✔️ | | | [SiamMask](/serverless/pytorch/foolwood/siammask/nuclio) | tracker | PyTorch | ✔️ | ✔️ | | [TransT](/serverless/pytorch/dschoerk/transt/nuclio) | tracker | PyTorch | ✔️ | ✔️ | | [f-BRS](/serverless/pytorch/saic-vul/fbrs/nuclio) | interactor | PyTorch | ✔️ | | diff --git a/serverless/pytorch/ultralytics/yolov5/nuclio/function-gpu.yaml b/serverless/pytorch/ultralytics/yolov5/nuclio/function-gpu.yaml deleted file mode 100644 index d2caf2c9783..00000000000 --- a/serverless/pytorch/ultralytics/yolov5/nuclio/function-gpu.yaml +++ /dev/null @@ -1,127 +0,0 @@ -metadata: - name: pth-ultralytics-yolov5 - namespace: cvat - annotations: - name: YOLO v5 - type: detector - framework: pytorch - spec: | - [ - { "id": 0, "name": "person" }, - { "id": 1, "name": "bicycle" }, - { "id": 2, "name": "car" }, - { "id": 3, "name": "motorbike" }, - { "id": 4, "name": "aeroplane" }, - { "id": 5, "name": "bus" }, - { "id": 6, "name": "train" }, - { "id": 7, "name": "truck" }, - { "id": 8, "name": "boat" }, - { "id": 9, "name": "traffic light" }, - { "id": 10, "name": "fire hydrant" }, - { "id": 11, "name": "stop sign" }, - { "id": 12, "name": "parking meter" }, - { "id": 13, "name": "bench" }, - { "id": 14, "name": "bird" }, - { "id": 15, "name": "cat" }, - { "id": 16, "name": "dog" }, - { "id": 17, "name": "horse" }, - { "id": 18, "name": "sheep" }, - { "id": 19, "name": "cow" }, - { "id": 20, "name": "elephant" }, - { "id": 21, "name": "bear" }, - { "id": 22, 
"name": "zebra" }, - { "id": 23, "name": "giraffe" }, - { "id": 24, "name": "backpack" }, - { "id": 25, "name": "umbrella" }, - { "id": 26, "name": "handbag" }, - { "id": 27, "name": "tie" }, - { "id": 28, "name": "suitcase" }, - { "id": 29, "name": "frisbee" }, - { "id": 30, "name": "skis" }, - { "id": 31, "name": "snowboard" }, - { "id": 32, "name": "sports ball" }, - { "id": 33, "name": "kite" }, - { "id": 34, "name": "baseball bat" }, - { "id": 35, "name": "baseball glove" }, - { "id": 36, "name": "skateboard" }, - { "id": 37, "name": "surfboard" }, - { "id": 38, "name": "tennis racket" }, - { "id": 39, "name": "bottle" }, - { "id": 40, "name": "wine glass" }, - { "id": 41, "name": "cup" }, - { "id": 42, "name": "fork" }, - { "id": 43, "name": "knife" }, - { "id": 44, "name": "spoon" }, - { "id": 45, "name": "bowl" }, - { "id": 46, "name": "banana" }, - { "id": 47, "name": "apple" }, - { "id": 48, "name": "sandwich" }, - { "id": 49, "name": "orange" }, - { "id": 50, "name": "broccoli" }, - { "id": 51, "name": "carrot" }, - { "id": 52, "name": "hot dog" }, - { "id": 53, "name": "pizza" }, - { "id": 54, "name": "donut" }, - { "id": 55, "name": "cake" }, - { "id": 56, "name": "chair" }, - { "id": 57, "name": "sofa" }, - { "id": 58, "name": "pottedplant" }, - { "id": 59, "name": "bed" }, - { "id": 60, "name": "diningtable" }, - { "id": 61, "name": "toilet" }, - { "id": 62, "name": "tvmonitor" }, - { "id": 63, "name": "laptop" }, - { "id": 64, "name": "mouse" }, - { "id": 65, "name": "remote" }, - { "id": 66, "name": "keyboard" }, - { "id": 67, "name": "cell phone" }, - { "id": 68, "name": "microwave" }, - { "id": 69, "name": "oven" }, - { "id": 70, "name": "toaster" }, - { "id": 71, "name": "sink" }, - { "id": 72, "name": "refrigerator" }, - { "id": 73, "name": "book" }, - { "id": 74, "name": "clock" }, - { "id": 75, "name": "vase" }, - { "id": 76, "name": "scissors" }, - { "id": 77, "name": "teddy bear" }, - { "id": 78, "name": "hair drier" }, - { "id": 79, 
"name": "toothbrush" } - ] - -spec: - description: YOLO v5 via pytorch hub - runtime: 'python:3.6' - handler: main:handler - eventTimeout: 30s - build: - image: cvat.pth.ultralytics.yolov5 - baseImage: ultralytics/yolov5:latest - - directives: - preCopy: - - kind: USER - value: root - - kind: RUN - value: apt update && apt install --no-install-recommends -y libglib2.0-0 - - kind: WORKDIR - value: /opt/nuclio - - triggers: - myHttpTrigger: - maxWorkers: 1 - kind: 'http' - workerAvailabilityTimeoutMilliseconds: 10000 - attributes: - maxRequestBodySize: 33554432 # 32MB - - resources: - limits: - nvidia.com/gpu: 1 - - platform: - attributes: - restartPolicy: - name: always - maximumRetryCount: 3 - mountMode: volume diff --git a/serverless/pytorch/ultralytics/yolov5/nuclio/function.yaml b/serverless/pytorch/ultralytics/yolov5/nuclio/function.yaml deleted file mode 100644 index 22e794d1d86..00000000000 --- a/serverless/pytorch/ultralytics/yolov5/nuclio/function.yaml +++ /dev/null @@ -1,123 +0,0 @@ -metadata: - name: pth-ultralytics-yolov5 - namespace: cvat - annotations: - name: YOLO v5 - type: detector - framework: pytorch - spec: | - [ - { "id": 0, "name": "person" }, - { "id": 1, "name": "bicycle" }, - { "id": 2, "name": "car" }, - { "id": 3, "name": "motorbike" }, - { "id": 4, "name": "aeroplane" }, - { "id": 5, "name": "bus" }, - { "id": 6, "name": "train" }, - { "id": 7, "name": "truck" }, - { "id": 8, "name": "boat" }, - { "id": 9, "name": "traffic light" }, - { "id": 10, "name": "fire hydrant" }, - { "id": 11, "name": "stop sign" }, - { "id": 12, "name": "parking meter" }, - { "id": 13, "name": "bench" }, - { "id": 14, "name": "bird" }, - { "id": 15, "name": "cat" }, - { "id": 16, "name": "dog" }, - { "id": 17, "name": "horse" }, - { "id": 18, "name": "sheep" }, - { "id": 19, "name": "cow" }, - { "id": 20, "name": "elephant" }, - { "id": 21, "name": "bear" }, - { "id": 22, "name": "zebra" }, - { "id": 23, "name": "giraffe" }, - { "id": 24, "name": "backpack" }, - 
{ "id": 25, "name": "umbrella" }, - { "id": 26, "name": "handbag" }, - { "id": 27, "name": "tie" }, - { "id": 28, "name": "suitcase" }, - { "id": 29, "name": "frisbee" }, - { "id": 30, "name": "skis" }, - { "id": 31, "name": "snowboard" }, - { "id": 32, "name": "sports ball" }, - { "id": 33, "name": "kite" }, - { "id": 34, "name": "baseball bat" }, - { "id": 35, "name": "baseball glove" }, - { "id": 36, "name": "skateboard" }, - { "id": 37, "name": "surfboard" }, - { "id": 38, "name": "tennis racket" }, - { "id": 39, "name": "bottle" }, - { "id": 40, "name": "wine glass" }, - { "id": 41, "name": "cup" }, - { "id": 42, "name": "fork" }, - { "id": 43, "name": "knife" }, - { "id": 44, "name": "spoon" }, - { "id": 45, "name": "bowl" }, - { "id": 46, "name": "banana" }, - { "id": 47, "name": "apple" }, - { "id": 48, "name": "sandwich" }, - { "id": 49, "name": "orange" }, - { "id": 50, "name": "broccoli" }, - { "id": 51, "name": "carrot" }, - { "id": 52, "name": "hot dog" }, - { "id": 53, "name": "pizza" }, - { "id": 54, "name": "donut" }, - { "id": 55, "name": "cake" }, - { "id": 56, "name": "chair" }, - { "id": 57, "name": "sofa" }, - { "id": 58, "name": "pottedplant" }, - { "id": 59, "name": "bed" }, - { "id": 60, "name": "diningtable" }, - { "id": 61, "name": "toilet" }, - { "id": 62, "name": "tvmonitor" }, - { "id": 63, "name": "laptop" }, - { "id": 64, "name": "mouse" }, - { "id": 65, "name": "remote" }, - { "id": 66, "name": "keyboard" }, - { "id": 67, "name": "cell phone" }, - { "id": 68, "name": "microwave" }, - { "id": 69, "name": "oven" }, - { "id": 70, "name": "toaster" }, - { "id": 71, "name": "sink" }, - { "id": 72, "name": "refrigerator" }, - { "id": 73, "name": "book" }, - { "id": 74, "name": "clock" }, - { "id": 75, "name": "vase" }, - { "id": 76, "name": "scissors" }, - { "id": 77, "name": "teddy bear" }, - { "id": 78, "name": "hair drier" }, - { "id": 79, "name": "toothbrush" } - ] - -spec: - description: YOLO v5 via pytorch hub - runtime: 'python:3.6' 
- handler: main:handler - eventTimeout: 30s - build: - image: cvat.pth.ultralytics.yolov5 - baseImage: ultralytics/yolov5:latest-cpu - - directives: - preCopy: - - kind: USER - value: root - - kind: RUN - value: apt update && apt install --no-install-recommends -y libglib2.0-0 - - kind: WORKDIR - value: /opt/nuclio - - triggers: - myHttpTrigger: - maxWorkers: 2 - kind: 'http' - workerAvailabilityTimeoutMilliseconds: 10000 - attributes: - maxRequestBodySize: 33554432 # 32MB - - platform: - attributes: - restartPolicy: - name: always - maximumRetryCount: 3 - mountMode: volume diff --git a/serverless/pytorch/ultralytics/yolov5/nuclio/main.py b/serverless/pytorch/ultralytics/yolov5/nuclio/main.py deleted file mode 100644 index 92bcf2e02a3..00000000000 --- a/serverless/pytorch/ultralytics/yolov5/nuclio/main.py +++ /dev/null @@ -1,40 +0,0 @@ -import json -import base64 -from PIL import Image -import io -import torch - -def init_context(context): - context.logger.info("Init context... 0%") - - # Read the DL model - model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5l, yolov5x, custom - context.user_data.model = model - - context.logger.info("Init context...100%") - -def handler(context, event): - context.logger.info("Run yolo-v5 model") - data = event.body - buf = io.BytesIO(base64.b64decode(data["image"])) - threshold = float(data.get("threshold", 0.5)) - context.user_data.model.conf = threshold - image = Image.open(buf) - yolo_results_json = context.user_data.model(image).pandas().xyxy[0].to_dict(orient='records') - - encoded_results = [] - for result in yolo_results_json: - encoded_results.append({ - 'confidence': result['confidence'], - 'label': result['name'], - 'points': [ - result['xmin'], - result['ymin'], - result['xmax'], - result['ymax'] - ], - 'type': 'rectangle' - }) - - return context.Response(body=json.dumps(encoded_results), headers={}, - content_type='application/json', status_code=200) diff --git 
a/site/content/en/docs/manual/advanced/ai-tools.md b/site/content/en/docs/manual/advanced/ai-tools.md index 3c355eb745d..feab483e901 100644 --- a/site/content/en/docs/manual/advanced/ai-tools.md +++ b/site/content/en/docs/manual/advanced/ai-tools.md @@ -204,7 +204,6 @@ see [Automatic annotation](/docs/manual/advanced/automatic-annotation/). | Mask RCNN | The model generates polygons for each instance of an object in the image.

For more information, see:
  • [GitHub: Mask RCNN](https://github.com/matterport/Mask_RCNN)
  • [Paper: Mask RCNN](https://arxiv.org/pdf/1703.06870.pdf) | | Faster RCNN | The model generates bounding boxes for each instance of an object in the image.
    In this model, RPN and Fast R-CNN are combined into a single network.

    For more information, see:
  • [GitHub: Faster RCNN](https://github.com/ShaoqingRen/faster_rcnn)
  • [Paper: Faster RCNN](https://arxiv.org/pdf/1506.01497.pdf) | | YOLO v3 | YOLO v3 is a family of object detection architectures and models pre-trained on the COCO dataset.

    For more information, see:
  • [GitHub: YOLO v3](https://github.com/ultralytics/yolov3)
  • [Site: YOLO v3](https://docs.ultralytics.com/#yolov3)
  • [Paper: YOLO v3](https://arxiv.org/pdf/1804.02767v1.pdf) | -| YOLO v5 | YOLO v5 is a family of object detection architectures and models based on the Pytorch framework.

    For more information, see:
  • [GitHub: YOLO v5](https://github.com/ultralytics/yolov5)
  • [Site: YOLO v5](https://docs.ultralytics.com/#yolov5) | | Semantic segmentation for ADAS | This is a segmentation network to classify each pixel into 20 classes.

    For more information, see:
  • [Site: ADAS](https://docs.openvino.ai/2019_R1/_semantic_segmentation_adas_0001_description_semantic_segmentation_adas_0001.html) | | Mask RCNN with Tensorflow | Mask RCNN version with Tensorflow. The model generates polygons for each instance of an object in the image.

    For more information, see:
  • [GitHub: Mask RCNN](https://github.com/matterport/Mask_RCNN)
  • [Paper: Mask RCNN](https://arxiv.org/pdf/1703.06870.pdf) | | Faster RCNN with Tensorflow | Faster RCNN version with Tensorflow. The model generates bounding boxes for each instance of an object in the image.
    In this model, RPN and Fast R-CNN are combined into a single network.

    For more information, see:
  • [Site: Faster RCNN with Tensorflow](https://docs.openvino.ai/2021.4/omz_models_model_faster_rcnn_inception_v2_coco.html)
  • [Paper: Faster RCNN](https://arxiv.org/pdf/1506.01497.pdf) | diff --git a/site/content/en/docs/manual/advanced/automatic-annotation.md b/site/content/en/docs/manual/advanced/automatic-annotation.md index 3805509d7ad..9f3c7955fad 100644 --- a/site/content/en/docs/manual/advanced/automatic-annotation.md +++ b/site/content/en/docs/manual/advanced/automatic-annotation.md @@ -93,7 +93,6 @@ List of pre-installed models: | RetinaNet R101 | RetinaNet is a one-stage object detection model that utilizes a focal loss function to address class imbalance during training. Focal loss applies a modulating term to the cross entropy loss to focus learning on hard negative examples. RetinaNet is a single, unified network composed of a backbone network and two task-specific subnetworks.

    For more information, see:
  • [Site: RetinaNET](https://paperswithcode.com/lib/detectron2/retinanet) | | Text detection | Text detector based on PixelLink architecture with MobileNetV2, depth_multiplier=1.4 as a backbone for indoor/outdoor scenes.

    For more information, see:
  • [Site: OpenVINO Text detection 004](https://docs.openvino.ai/2022.3/omz_models_model_text_detection_0004.html) | | YOLO v3 | YOLO v3 is a family of object detection architectures and models pre-trained on the COCO dataset.

    For more information, see:
  • [Site: YOLO v3](https://docs.openvino.ai/2022.3/omz_models_model_yolo_v3_tf.html) | -| YOLO v5 | YOLO v5 is a family of object detection architectures and models based on the Pytorch framework.

    For more information, see:
  • [GitHub: YOLO v5](https://github.com/ultralytics/yolov5)
  • [Site: YOLO v5](https://docs.ultralytics.com/#yolov5) | | YOLO v7 | YOLOv7 is an advanced object detection model that outperforms other detectors in terms of both speed and accuracy. It can process frames at a rate ranging from 5 to 160 frames per second (FPS) and achieves the highest accuracy with 56.8% average precision (AP) among real-time object detectors running at 30 FPS or higher on the V100 graphics processing unit (GPU).

    For more information, see:
  • [GitHub: YOLO v7](https://github.com/WongKinYiu/yolov7)
  • [Paper: YOLO v7](https://arxiv.org/pdf/2207.02696.pdf) | From 78fd667bb49bc59fc8135f088ec6d5320fb7ba0c Mon Sep 17 00:00:00 2001 From: Andrey Zhavoronkov Date: Thu, 27 Jul 2023 15:59:41 +0300 Subject: [PATCH 20/32] Update version --- CHANGELOG.md | 25 +++++++++++++++++++++++++ cvat/__init__.py | 2 +- cvat/schema.yml | 2 +- docker-compose.yml | 18 +++++++++--------- helm-chart/values.yaml | 4 ++-- 5 files changed, 38 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 933d5468e80..cfa756c9e72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,31 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## \[Unreleased] +### Added + +- TDB + +### Changed + +- TDB + +### Deprecated + +- TDB + +### Removed + +- TDB + +### Fixed + +- TDB + +### Security + +- TDB + ## \[2.5.2\] - 2023-07-27 ### Added diff --git a/cvat/__init__.py b/cvat/__init__.py index 2b4479680b2..d227ef70902 100644 --- a/cvat/__init__.py +++ b/cvat/__init__.py @@ -4,6 +4,6 @@ from cvat.utils.version import get_version -VERSION = (2, 5, 2, 'final', 0) +VERSION = (2, 6, 0, 'alpha', 0) __version__ = get_version(VERSION) diff --git a/cvat/schema.yml b/cvat/schema.yml index 091b33daa05..8daba12a02b 100644 --- a/cvat/schema.yml +++ b/cvat/schema.yml @@ -1,7 +1,7 @@ openapi: 3.0.3 info: title: CVAT REST API - version: 2.5.2 + version: '2.6' description: REST API for Computer Vision Annotation Tool (CVAT) termsOfService: https://www.google.com/policies/terms/ contact: diff --git a/docker-compose.yml b/docker-compose.yml index 0bf34777f60..5ea0ff9f76f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,7 +25,7 @@ services: cvat_server: container_name: cvat_server - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - 
cvat_redis @@ -64,7 +64,7 @@ services: cvat_utils: container_name: cvat_utils - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -89,7 +89,7 @@ services: cvat_worker_import: container_name: cvat_worker_import - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -112,7 +112,7 @@ services: cvat_worker_export: container_name: cvat_worker_export - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -135,7 +135,7 @@ services: cvat_worker_annotation: container_name: cvat_worker_annotation - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -158,7 +158,7 @@ services: cvat_worker_webhooks: container_name: cvat_worker_webhooks - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -182,7 +182,7 @@ services: cvat_worker_quality_reports: container_name: cvat_worker_quality_reports - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -204,7 +204,7 @@ services: cvat_worker_analytics_reports: container_name: cvat_worker_analytics_reports - image: cvat/server:${CVAT_VERSION:-v2.5.2} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_redis @@ -227,7 +227,7 @@ services: cvat_ui: container_name: cvat_ui - image: cvat/ui:${CVAT_VERSION:-v2.5.2} + image: cvat/ui:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_server diff --git a/helm-chart/values.yaml b/helm-chart/values.yaml index aa63541a37d..5ebee91dfd6 100644 --- a/helm-chart/values.yaml +++ b/helm-chart/values.yaml @@ -95,7 +95,7 @@ cvat: additionalVolumeMounts: [] replicas: 1 image: cvat/server - tag: v2.5.2 + tag: dev imagePullPolicy: 
Always permissionFix: enabled: true @@ -119,7 +119,7 @@ cvat: frontend: replicas: 1 image: cvat/ui - tag: v2.5.2 + tag: dev imagePullPolicy: Always labels: {} # test: test From acfa87a96e17d89f3d20907b2f48c06c0f1b6659 Mon Sep 17 00:00:00 2001 From: kolonist-minjun <130522722+kolonist-minjun@users.noreply.github.com> Date: Thu, 10 Aug 2023 16:25:03 +0900 Subject: [PATCH 21/32] Fix filename for label in CamVid format (#6600) Fixed the filename for the label map file in CamVid format from 'labelmap.txt' to 'label_colors.txt' --- CHANGELOG.md | 1 + .../docs/manual/advanced/formats/format-camvid.md | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f34bfcef5fa..56720eaa52b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,6 +39,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Accidentally using prebuilt FFmpeg bundled in PyAV instead of the custom build. +- Fix filename for label in CamVid format () ### Security diff --git a/site/content/en/docs/manual/advanced/formats/format-camvid.md b/site/content/en/docs/manual/advanced/formats/format-camvid.md index 1485188a5bc..301d32b8fcd 100644 --- a/site/content/en/docs/manual/advanced/formats/format-camvid.md +++ b/site/content/en/docs/manual/advanced/formats/format-camvid.md @@ -13,7 +13,7 @@ Downloaded file: a zip archive of the following structure: ```bash taskname.zip/ -├── labelmap.txt # optional, required for non-CamVid labels +├── label_colors.txt # optional, required for non-CamVid labels ├── / | ├── image1.png | └── image2.png @@ -22,13 +22,23 @@ taskname.zip/ | └── image2.png └── .txt -# labelmap.txt +# label_colors.txt (with color value type) +# if you want to manually set the color for labels, configure label_colors.txt as follows: # color (RGB) label 0 0 0 Void 64 128 64 Animal 192 0 128 Archway 0 128 192 Bicyclist 0 128 64 Bridge + +# label_colors.txt (without color value type) +# if you do not manually set 
the color for labels, it will be set automatically: +# label +Void +Animal +Archway +Bicyclist +Bridge ``` Mask is a `png` image with 1 or 3 channels where each pixel From ba94a8efcb72d9d69d26c8d7edeee208083656f4 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Thu, 10 Aug 2023 10:26:20 +0300 Subject: [PATCH 22/32] Drop Python 3.7 support in the SDK and CLI (#6636) Drop python 3.7 support due to eol --- cvat-cli/setup.py | 2 +- cvat-sdk/cvat_sdk/auto_annotation/interface.py | 3 +-- cvat-sdk/cvat_sdk/core/proxies/model_proxy.py | 3 ++- cvat-sdk/cvat_sdk/core/utils.py | 3 +-- cvat-sdk/cvat_sdk/pytorch/transforms.py | 3 +-- cvat-sdk/gen/generator-config.yml | 2 +- site/content/en/docs/api_sdk/cli/_index.md | 2 +- site/content/en/docs/api_sdk/sdk/_index.md | 2 +- 8 files changed, 9 insertions(+), 11 deletions(-) diff --git a/cvat-cli/setup.py b/cvat-cli/setup.py index 58567b65e3d..454ce2f0095 100644 --- a/cvat-cli/setup.py +++ b/cvat-cli/setup.py @@ -56,7 +56,7 @@ def parse_requirements(filename=BASE_REQUIREMENTS_FILE): "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], - python_requires=">=3.7", + python_requires=">=3.8", install_requires=BASE_REQUIREMENTS, entry_points={ "console_scripts": [ diff --git a/cvat-sdk/cvat_sdk/auto_annotation/interface.py b/cvat-sdk/cvat_sdk/auto_annotation/interface.py index 160d12533d6..67313a7da6e 100644 --- a/cvat-sdk/cvat_sdk/auto_annotation/interface.py +++ b/cvat-sdk/cvat_sdk/auto_annotation/interface.py @@ -3,11 +3,10 @@ # SPDX-License-Identifier: MIT import abc -from typing import List, Sequence +from typing import List, Protocol, Sequence import attrs import PIL.Image -from typing_extensions import Protocol import cvat_sdk.models as models diff --git a/cvat-sdk/cvat_sdk/core/proxies/model_proxy.py b/cvat-sdk/cvat_sdk/core/proxies/model_proxy.py index 9f71fdd9365..9a761771af7 100644 --- a/cvat-sdk/cvat_sdk/core/proxies/model_proxy.py +++ b/cvat-sdk/cvat_sdk/core/proxies/model_proxy.py @@ -13,6 
+13,7 @@ Dict, Generic, List, + Literal, Optional, Tuple, Type, @@ -21,7 +22,7 @@ overload, ) -from typing_extensions import Literal, Self +from typing_extensions import Self from cvat_sdk.api_client.model_utils import IModelData, ModelNormal, to_json from cvat_sdk.core.helpers import get_paginated_collection diff --git a/cvat-sdk/cvat_sdk/core/utils.py b/cvat-sdk/cvat_sdk/core/utils.py index e7c28e90e9f..1708dfd5779 100644 --- a/cvat-sdk/cvat_sdk/core/utils.py +++ b/cvat-sdk/cvat_sdk/core/utils.py @@ -14,14 +14,13 @@ ContextManager, Dict, Iterator, + Literal, Sequence, TextIO, Union, overload, ) -from typing_extensions import Literal - def filter_dict( d: Dict[str, Any], *, keep: Sequence[str] = None, drop: Sequence[str] = None diff --git a/cvat-sdk/cvat_sdk/pytorch/transforms.py b/cvat-sdk/cvat_sdk/pytorch/transforms.py index d63fdba65f6..1fb99362def 100644 --- a/cvat-sdk/cvat_sdk/pytorch/transforms.py +++ b/cvat-sdk/cvat_sdk/pytorch/transforms.py @@ -2,13 +2,12 @@ # # SPDX-License-Identifier: MIT -from typing import FrozenSet +from typing import FrozenSet, TypedDict import attrs import attrs.validators import torch import torch.utils.data -from typing_extensions import TypedDict from cvat_sdk.datasets.common import UnsupportedDatasetError from cvat_sdk.pytorch.common import Target diff --git a/cvat-sdk/gen/generator-config.yml b/cvat-sdk/gen/generator-config.yml index 80bc80a5e4f..26e78cb8a3a 100644 --- a/cvat-sdk/gen/generator-config.yml +++ b/cvat-sdk/gen/generator-config.yml @@ -4,7 +4,7 @@ additionalProperties: packageName: "cvat_sdk.api_client" initRequiredVars: true generateSourceCodeOnly: false - generatorLanguageVersion: '>=3.7' + generatorLanguageVersion: '>=3.8' globalProperties: generateAliasAsModel: true apiTests: false diff --git a/site/content/en/docs/api_sdk/cli/_index.md b/site/content/en/docs/api_sdk/cli/_index.md index 83b4b269219..0b7beb67eb1 100644 --- a/site/content/en/docs/api_sdk/cli/_index.md +++ 
b/site/content/en/docs/api_sdk/cli/_index.md @@ -30,7 +30,7 @@ To install an [official release of CVAT CLI](https://pypi.org/project/cvat-cli/) pip install cvat-cli ``` -We support Python versions 3.7 - 3.9. +We support Python versions 3.8 and higher. ## Usage diff --git a/site/content/en/docs/api_sdk/sdk/_index.md b/site/content/en/docs/api_sdk/sdk/_index.md index eca76659b90..025130ba396 100644 --- a/site/content/en/docs/api_sdk/sdk/_index.md +++ b/site/content/en/docs/api_sdk/sdk/_index.md @@ -44,7 +44,7 @@ To use the PyTorch adapter, request the `pytorch` extra: pip install "cvat-sdk[pytorch]" ``` -We support Python versions 3.7 - 3.9. +We support Python versions 3.8 and higher. ## Usage From 10331842580a4fec477031254d4141231512935c Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Thu, 10 Aug 2023 11:46:54 +0300 Subject: [PATCH 23/32] Remove the predefined YOLOv8 function due to license incompatibility (#6632) Ultralytics is licensed under the AGPL, which is incompatible with our MIT license. As a replacement, I will soon add a torchvision-based function. 
--- .github/workflows/full.yml | 2 +- .github/workflows/main.yml | 2 +- .../auto_annotation/functions/yolov8n.py | 36 --------- .../openapi-generator/setup.mustache | 1 - site/content/en/docs/api_sdk/cli/_index.md | 5 -- .../en/docs/api_sdk/sdk/auto-annotation.md | 21 ----- tests/python/sdk/test_auto_annotation.py | 76 +------------------ 7 files changed, 3 insertions(+), 140 deletions(-) delete mode 100644 cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py diff --git a/.github/workflows/full.yml b/.github/workflows/full.yml index b90a8599c10..58c3b63113a 100644 --- a/.github/workflows/full.yml +++ b/.github/workflows/full.yml @@ -160,7 +160,7 @@ jobs: - name: Install SDK run: | pip3 install -r ./tests/python/requirements.txt \ - -e './cvat-sdk[pytorch,ultralytics]' -e ./cvat-cli + -e './cvat-sdk[pytorch]' -e ./cvat-cli - name: Running REST API and SDK tests id: run_tests diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b1a85b809fd..023645d5238 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -160,7 +160,7 @@ jobs: - name: Install SDK run: | pip3 install -r ./tests/python/requirements.txt \ - -e './cvat-sdk[pytorch,ultralytics]' -e ./cvat-cli + -e './cvat-sdk[pytorch]' -e ./cvat-cli - name: Run REST API and SDK tests id: run_tests diff --git a/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py b/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py deleted file mode 100644 index 325f6036a63..00000000000 --- a/cvat-sdk/cvat_sdk/auto_annotation/functions/yolov8n.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2023 CVAT.ai Corporation -# -# SPDX-License-Identifier: MIT - -""" -An auto-annotation detection function powered by the YOLOv8n model. -Outputs rectangles. 
-""" - -from typing import Iterator, List - -import PIL.Image -from ultralytics import YOLO -from ultralytics.engine.results import Results - -import cvat_sdk.auto_annotation as cvataa -import cvat_sdk.models as models - -_model = YOLO("yolov8n.pt") - -spec = cvataa.DetectionFunctionSpec( - labels=[cvataa.label_spec(name, id) for id, name in _model.names.items()], -) - - -def _yolo_to_cvat(results: List[Results]) -> Iterator[models.LabeledShapeRequest]: - for result in results: - for box, label in zip(result.boxes.xyxy, result.boxes.cls): - yield cvataa.rectangle( - label_id=int(label.item()), - points=[p.item() for p in box], - ) - - -def detect(context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: - return list(_yolo_to_cvat(_model.predict(source=image, verbose=False))) diff --git a/cvat-sdk/gen/templates/openapi-generator/setup.mustache b/cvat-sdk/gen/templates/openapi-generator/setup.mustache index fc6f34144da..eb89f5d2055 100644 --- a/cvat-sdk/gen/templates/openapi-generator/setup.mustache +++ b/cvat-sdk/gen/templates/openapi-generator/setup.mustache @@ -78,7 +78,6 @@ setup( install_requires=BASE_REQUIREMENTS, extras_require={ "pytorch": ['torch', 'torchvision'], - "ultralytics": ["ultralytics"], }, package_dir={"": "."}, packages=find_packages(include=["cvat_sdk*"]), diff --git a/site/content/en/docs/api_sdk/cli/_index.md b/site/content/en/docs/api_sdk/cli/_index.md index 0b7beb67eb1..1067d934a14 100644 --- a/site/content/en/docs/api_sdk/cli/_index.md +++ b/site/content/en/docs/api_sdk/cli/_index.md @@ -249,11 +249,6 @@ def detect(context, image): ... 
``` -- Annotate the task with id 137 with the predefined YOLOv8 function: - ```bash - cvat-cli auto-annotate 137 --function-module cvat_sdk.auto_annotation.functions.yolov8n - ``` - - Annotate the task with id 138 with an AA function defined in `my_func.py`: ```bash cvat-cli auto-annotate 138 --function-file path/to/my_func.py diff --git a/site/content/en/docs/api_sdk/sdk/auto-annotation.md b/site/content/en/docs/api_sdk/sdk/auto-annotation.md index a8d9c1d79c2..d6281f7e168 100644 --- a/site/content/en/docs/api_sdk/sdk/auto-annotation.md +++ b/site/content/en/docs/api_sdk/sdk/auto-annotation.md @@ -197,24 +197,3 @@ Same logic applies to sub-label IDs. `annotate_task` will raise a `BadFunctionError` exception if it detects that the function violated the AA function protocol. - -## Predefined AA function - -This layer includes a predefined AA function based on the Ultralytics YOLOv8n model. -You can use this AA function as-is, or use it as a base on which to build your own. - -To use this function, you have to install CVAT SDK with the `ultralytics` extra: - -```console -$ pip install "cvat-sdk[ultralytics]" -``` - -The AA function is implemented as a module -in order to be compatible with the `cvat-cli auto-annotate` command. 
-Simply import `cvat_sdk.auto_annotation.functions.yolov8n` -and use the module itself as a function: - -```python -import cvat_sdk.auto_annotation.functions.yolov8n as yolov8n -annotate_task(, , yolov8n) -``` diff --git a/tests/python/sdk/test_auto_annotation.py b/tests/python/sdk/test_auto_annotation.py index 05814affee7..c41b655d6c5 100644 --- a/tests/python/sdk/test_auto_annotation.py +++ b/tests/python/sdk/test_auto_annotation.py @@ -3,11 +3,10 @@ # SPDX-License-Identifier: MIT import io -import sys from logging import Logger from pathlib import Path from types import SimpleNamespace as namespace -from typing import Any, List, Tuple +from typing import List, Tuple import cvat_sdk.auto_annotation as cvataa import PIL.Image @@ -19,13 +18,6 @@ from .util import make_pbar -try: - import numpy as np - from ultralytics.engine.results import Results as UResults -except ModuleNotFoundError: - np = None - UResults = None - @pytest.fixture(autouse=True) def _common_setup( @@ -561,69 +553,3 @@ def test_non_skeleton_with_elements(self): ], "non-skeleton shape with elements", ) - - -class FakeYolo: - def __init__(self, *args, **kwargs) -> None: - pass - - names = {42: "person"} - - def predict(self, source: Any, **kwargs) -> "List[UResults]": - return [ - UResults( - orig_img=np.zeros([100, 100, 3]), - path=None, - names=self.names, - boxes=np.array([[1, 2, 3, 4, 0.9, 42]]), - ) - ] - - -@pytest.mark.skipif(UResults is None, reason="Ultralytics is not installed") -class TestAutoAnnotationFunctions: - @pytest.fixture(autouse=True) - def setup( - self, - tmp_path: Path, - fxt_login: Tuple[Client, str], - ): - self.client = fxt_login[0] - self.image = generate_image_file("1.png", size=(100, 100)) - - image_dir = tmp_path / "images" - image_dir.mkdir() - - image_path = image_dir / self.image.name - image_path.write_bytes(self.image.getbuffer()) - - self.task = self.client.tasks.create_from_data( - models.TaskWriteRequest( - "Auto-annotation test task", - labels=[ - 
models.PatchedLabelRequest(name="person"), - ], - ), - resources=[image_path], - ) - - task_labels = self.task.get_labels() - self.task_labels_by_id = {label.id: label for label in task_labels} - - def test_yolov8n(self, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr("ultralytics.YOLO", FakeYolo) - - import cvat_sdk.auto_annotation.functions.yolov8n as yolov8n - - try: - cvataa.annotate_task(self.client, self.task.id, yolov8n) - - annotations = self.task.get_annotations() - - assert len(annotations.shapes) == 1 - assert self.task_labels_by_id[annotations.shapes[0].label_id].name == "person" - assert annotations.shapes[0].type.value == "rectangle" - assert annotations.shapes[0].points == [1, 2, 3, 4] - - finally: - del sys.modules[yolov8n.__name__] From 9a01ece051b43ca4186fd860f2124ac257826baa Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 11 Aug 2023 11:17:26 +0300 Subject: [PATCH 24/32] Added cached frames indication (#6586) Depends on #6585 ### Motivation and context Resolved #8 Decoded range is red: image ### How has this been tested? 
### Checklist - [x] I submit my changes into the `develop` branch - [x] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [x] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- CHANGELOG.md | 1 + cvat-core/package.json | 2 +- cvat-core/src/frames.ts | 4 +-- cvat-core/src/session-implementation.ts | 23 ++++++------ cvat-core/src/session.ts | 13 +++---- cvat-data/src/ts/cvat-data.ts | 18 ++++------ cvat-ui/package.json | 2 +- cvat-ui/src/actions/annotation-actions.ts | 35 +++++++++++++++++-- .../canvas/views/canvas2d/canvas-wrapper.tsx | 4 +-- .../views/canvas3d/canvas-wrapper3D.tsx | 4 +-- .../components/annotation-page/styles.scss | 32 ++++++++++++++--- .../top-bar/player-navigation.tsx | 15 ++++++++ .../annotation-page/top-bar/top-bar.tsx | 3 ++ .../annotation-page/top-bar/top-bar.tsx | 5 +++ cvat-ui/src/reducers/annotation-reducer.ts | 7 ++++ cvat-ui/src/reducers/index.ts | 1 + 16 files changed, 124 insertions(+), 45 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 56720eaa52b..ba25166343e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 user-provided function on the local machine, and a corresponding CLI command (`auto-annotate`) () +- Cached frames indication on the interface () ### Changed diff --git a/cvat-core/package.json b/cvat-core/package.json index 5556d187765..d9712f79ea6 100644 --- a/cvat-core/package.json +++ b/cvat-core/package.json @@ -1,6 +1,6 @@ { "name": "cvat-core", - "version": "10.0.1", + "version": "11.0.0", "description": "Part of Computer Vision Tool which presents an interface for client-side integration", "main": "src/api.ts", "scripts": { diff --git a/cvat-core/src/frames.ts b/cvat-core/src/frames.ts index 158e3582114..2f5b328f81b 100644 --- a/cvat-core/src/frames.ts +++ b/cvat-core/src/frames.ts @@ -582,12 +582,12 @@ export async function findFrame( return lastUndeletedFrame; } -export function getRanges(jobID): Array { +export function getCachedChunks(jobID): number[] { if (!(jobID in frameDataCache)) { return []; } - return frameDataCache[jobID].provider.cachedFrames; + return 
frameDataCache[jobID].provider.cachedChunks(true); } export function clear(jobID: number): void { diff --git a/cvat-core/src/session-implementation.ts b/cvat-core/src/session-implementation.ts index ea57c1881f1..5a873ae823c 100644 --- a/cvat-core/src/session-implementation.ts +++ b/cvat-core/src/session-implementation.ts @@ -13,7 +13,7 @@ import { getFrame, deleteFrame, restoreFrame, - getRanges, + getCachedChunks, clear as clearFrames, findFrame, getContextImage, @@ -163,9 +163,9 @@ export function implementJob(Job) { return result; }; - Job.prototype.frames.ranges.implementation = async function () { - const rangesData = await getRanges(this.id); - return rangesData; + Job.prototype.frames.cachedChunks.implementation = async function () { + const cachedChunks = await getCachedChunks(this.id); + return cachedChunks; }; Job.prototype.frames.preview.implementation = async function (this: JobClass): Promise { @@ -570,21 +570,18 @@ export function implementTask(Task) { isPlaying, step, this.dimension, + (chunkNumber, quality) => job.frames.chunk(chunkNumber, quality), ); return result; }; - Task.prototype.frames.ranges.implementation = async function () { - const rangesData = { - decoded: [], - buffered: [], - }; + Task.prototype.frames.cachedChunks.implementation = async function () { + let chunks = []; for (const job of this.jobs) { - const { decoded, buffered } = await getRanges(job.id); - rangesData.decoded.push(decoded); - rangesData.buffered.push(buffered); + const cachedChunks = await getCachedChunks(job.id); + chunks = chunks.concat(cachedChunks); } - return rangesData; + return Array.from(new Set(chunks)); }; Task.prototype.frames.preview.implementation = async function (this: TaskClass): Promise { diff --git a/cvat-core/src/session.ts b/cvat-core/src/session.ts index 4f34ed458cc..cad8b773ae2 100644 --- a/cvat-core/src/session.ts +++ b/cvat-core/src/session.ts @@ -210,8 +210,8 @@ function buildDuplicatedAPI(prototype) { prototype.frames.save, ); }, - async 
ranges() { - const result = await PluginRegistry.apiWrapper.call(this, prototype.frames.ranges); + async cachedChunks() { + const result = await PluginRegistry.apiWrapper.call(this, prototype.frames.cachedChunks); return result; }, async preview() { @@ -329,6 +329,7 @@ export class Job extends Session { public readonly taskId: number; public readonly dimension: DimensionType; public readonly dataChunkType: ChunkType; + public readonly dataChunkSize: number; public readonly bugTracker: string | null; public readonly mode: TaskMode; public readonly labels: Label[]; @@ -369,7 +370,7 @@ export class Job extends Session { delete: CallableFunction; restore: CallableFunction; save: CallableFunction; - ranges: CallableFunction; + cachedChunks: CallableFunction; preview: CallableFunction; contextImage: CallableFunction; search: CallableFunction; @@ -573,7 +574,7 @@ export class Job extends Session { delete: Object.getPrototypeOf(this).frames.delete.bind(this), restore: Object.getPrototypeOf(this).frames.restore.bind(this), save: Object.getPrototypeOf(this).frames.save.bind(this), - ranges: Object.getPrototypeOf(this).frames.ranges.bind(this), + cachedChunks: Object.getPrototypeOf(this).frames.cachedChunks.bind(this), preview: Object.getPrototypeOf(this).frames.preview.bind(this), search: Object.getPrototypeOf(this).frames.search.bind(this), contextImage: Object.getPrototypeOf(this).frames.contextImage.bind(this), @@ -684,7 +685,7 @@ export class Task extends Session { delete: CallableFunction; restore: CallableFunction; save: CallableFunction; - ranges: CallableFunction; + cachedChunks: CallableFunction; preview: CallableFunction; contextImage: CallableFunction; search: CallableFunction; @@ -1101,7 +1102,7 @@ export class Task extends Session { delete: Object.getPrototypeOf(this).frames.delete.bind(this), restore: Object.getPrototypeOf(this).frames.restore.bind(this), save: Object.getPrototypeOf(this).frames.save.bind(this), - ranges: 
Object.getPrototypeOf(this).frames.ranges.bind(this), + cachedChunks: Object.getPrototypeOf(this).frames.cachedChunks.bind(this), preview: Object.getPrototypeOf(this).frames.preview.bind(this), contextImage: Object.getPrototypeOf(this).frames.contextImage.bind(this), search: Object.getPrototypeOf(this).frames.search.bind(this), diff --git a/cvat-data/src/ts/cvat-data.ts b/cvat-data/src/ts/cvat-data.ts index 00768a04351..8d50fe64083 100644 --- a/cvat-data/src/ts/cvat-data.ts +++ b/cvat-data/src/ts/cvat-data.ts @@ -327,17 +327,11 @@ export class FrameDecoder { } } - get cachedChunks(): number[] { - return Object.keys(this.decodedChunks).map((chunkNumber: string) => +chunkNumber).sort((a, b) => a - b); - } - - get cachedFrames(): string[] { - const chunks = Object.keys(this.decodedChunks).map((chunkNumber: string) => +chunkNumber).sort((a, b) => a - b); - return chunks.map((chunk) => { - const frames = Object.keys(this.decodedChunks[chunk]).map((frame) => +frame); - const min = Math.min(...frames); - const max = Math.max(...frames); - return `${min}:${max}`; - }); + public cachedChunks(includeInProgress = false): number[] { + const chunkIsBeingDecoded = includeInProgress && this.chunkIsBeingDecoded ? + Math.floor(this.chunkIsBeingDecoded.start / this.chunkSize) : null; + return Object.keys(this.decodedChunks).map((chunkNumber: string) => +chunkNumber).concat( + ...(chunkIsBeingDecoded !== null ? 
[chunkIsBeingDecoded] : []), + ).sort((a, b) => a - b); } } diff --git a/cvat-ui/package.json b/cvat-ui/package.json index 46e6739ff9c..90ab5864200 100644 --- a/cvat-ui/package.json +++ b/cvat-ui/package.json @@ -1,6 +1,6 @@ { "name": "cvat-ui", - "version": "1.54.2", + "version": "1.55.0", "description": "CVAT single-page application", "main": "src/index.tsx", "scripts": { diff --git a/cvat-ui/src/actions/annotation-actions.ts b/cvat-ui/src/actions/annotation-actions.ts index 8b7a899b9ec..24c3cc63c17 100644 --- a/cvat-ui/src/actions/annotation-actions.ts +++ b/cvat-ui/src/actions/annotation-actions.ts @@ -581,10 +581,41 @@ export function switchPlay(playing: boolean): AnyAction { }; } -export function confirmCanvasReady(): AnyAction { +export function confirmCanvasReady(ranges?: string): AnyAction { return { type: AnnotationActionTypes.CONFIRM_CANVAS_READY, - payload: {}, + payload: { ranges }, + }; +} + +export function confirmCanvasReadyAsync(): ThunkAction { + return async (dispatch: ActionCreator, getState: () => CombinedState): Promise => { + try { + const state: CombinedState = getState(); + const { instance: job } = state.annotation.job; + const chunks = await job.frames.cachedChunks() as number[]; + const { startFrame, stopFrame, dataChunkSize } = job; + + const ranges = chunks.map((chunk) => ( + [ + Math.max(startFrame, chunk * dataChunkSize), + Math.min(stopFrame, (chunk + 1) * dataChunkSize - 1), + ] + )).reduce>((acc, val) => { + if (acc.length && acc[acc.length - 1][1] + 1 === val[0]) { + const newMax = val[1]; + acc[acc.length - 1][1] = newMax; + } else { + acc.push(val as [number, number]); + } + return acc; + }, []).map(([start, end]) => `${start}:${end}`).join(';'); + + dispatch(confirmCanvasReady(ranges)); + } catch (error) { + // even if error happens here, do not need to notify the users + dispatch(confirmCanvasReady()); + } }; } diff --git a/cvat-ui/src/components/annotation-page/canvas/views/canvas2d/canvas-wrapper.tsx 
b/cvat-ui/src/components/annotation-page/canvas/views/canvas2d/canvas-wrapper.tsx index 4493ba014f0..df2d3d63381 100644 --- a/cvat-ui/src/components/annotation-page/canvas/views/canvas2d/canvas-wrapper.tsx +++ b/cvat-ui/src/components/annotation-page/canvas/views/canvas2d/canvas-wrapper.tsx @@ -24,7 +24,7 @@ import config from 'config'; import CVATTooltip from 'components/common/cvat-tooltip'; import FrameTags from 'components/annotation-page/tag-annotation-workspace/frame-tags'; import { - confirmCanvasReady, + confirmCanvasReadyAsync, dragCanvas, zoomCanvas, resetCanvas, @@ -259,7 +259,7 @@ function mapStateToProps(state: CombinedState): StateToProps { function mapDispatchToProps(dispatch: any): DispatchToProps { return { onSetupCanvas(): void { - dispatch(confirmCanvasReady()); + dispatch(confirmCanvasReadyAsync()); }, onDragCanvas(enabled: boolean): void { dispatch(dragCanvas(enabled)); diff --git a/cvat-ui/src/components/annotation-page/canvas/views/canvas3d/canvas-wrapper3D.tsx b/cvat-ui/src/components/annotation-page/canvas/views/canvas3d/canvas-wrapper3D.tsx index e21a4a138d9..479ad283ede 100644 --- a/cvat-ui/src/components/annotation-page/canvas/views/canvas3d/canvas-wrapper3D.tsx +++ b/cvat-ui/src/components/annotation-page/canvas/views/canvas3d/canvas-wrapper3D.tsx @@ -16,7 +16,7 @@ import Spin from 'antd/lib/spin'; import { activateObject, - confirmCanvasReady, + confirmCanvasReadyAsync, createAnnotationsAsync, dragCanvas, editShape, @@ -131,7 +131,7 @@ function mapDispatchToProps(dispatch: any): DispatchToProps { dispatch(dragCanvas(enabled)); }, onSetupCanvas(): void { - dispatch(confirmCanvasReady()); + dispatch(confirmCanvasReadyAsync()); }, onResetCanvas(): void { dispatch(resetCanvas()); diff --git a/cvat-ui/src/components/annotation-page/styles.scss b/cvat-ui/src/components/annotation-page/styles.scss index 7d75f57d8f7..9d31599fe3d 100644 --- a/cvat-ui/src/components/annotation-page/styles.scss +++ 
b/cvat-ui/src/components/annotation-page/styles.scss @@ -3,7 +3,7 @@ // // SPDX-License-Identifier: MIT -@import '../../base.scss'; +@import '../../base'; .cvat-annotation-page.ant-layout { height: 100%; @@ -126,15 +126,39 @@ } } -.cvat-player-slider { +.cvat-player-slider.ant-slider { width: 350px; margin: 0; + margin-top: $grid-unit-size * -0.5; + + > .ant-slider-handle { + z-index: 100; + margin-top: -3.5px; + } + + > .ant-slider-track { + background: none; + } > .ant-slider-rail { + height: $grid-unit-size; background-color: $player-slider-color; } } +.cvat-player-slider-progress { + width: 350px; + height: $grid-unit-size; + position: absolute; + top: 0; + pointer-events: none; + + > rect { + transition: width 0.5s; + fill: #1890ff; + } +} + .cvat-player-filename-wrapper { max-width: $grid-unit-size * 30; max-height: $grid-unit-size * 3; @@ -221,7 +245,7 @@ .ant-table-thead { > tr > th { - padding: 5px 5px; + padding: $grid-unit-size 0 $grid-unit-size $grid-unit-size * 0.5; } } } @@ -446,7 +470,7 @@ } .group { - background: rgba(216, 233, 250, 0.5); + background: rgba(216, 233, 250, 50%); border: 1px solid #d3e0ec; } } diff --git a/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx b/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx index e95d15cf33d..931693690be 100644 --- a/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx +++ b/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx @@ -21,6 +21,7 @@ interface Props { startFrame: number; stopFrame: number; playing: boolean; + ranges: string; frameNumber: number; frameFilename: string; frameDeleted: boolean; @@ -47,6 +48,7 @@ function PlayerNavigation(props: Props): JSX.Element { deleteFrameShortcut, focusFrameInputShortcut, inputFrameRef, + ranges, onSliderChange, onInputChange, onURLIconClick, @@ -105,6 +107,19 @@ function PlayerNavigation(props: Props): JSX.Element { value={frameNumber || 0} onChange={onSliderChange} /> + {!!ranges && ( 
+ + {ranges.split(';').map((range) => { + const [start, end] = range.split(':').map((num) => +num); + const adjustedStart = Math.max(0, start - 1); + const totalSegments = stopFrame - startFrame; + const segmentWidth = 1000 / totalSegments; + const width = Math.max((end - adjustedStart), 1) * segmentWidth; + const offset = (Math.max((adjustedStart - startFrame), 0) / totalSegments) * 1000; + return (); + })} + + )} diff --git a/cvat-ui/src/components/annotation-page/top-bar/top-bar.tsx b/cvat-ui/src/components/annotation-page/top-bar/top-bar.tsx index ee800ce6861..7c88063bf21 100644 --- a/cvat-ui/src/components/annotation-page/top-bar/top-bar.tsx +++ b/cvat-ui/src/components/annotation-page/top-bar/top-bar.tsx @@ -69,6 +69,7 @@ interface Props { onRestoreFrame(): void; switchNavigationBlocked(blocked: boolean): void; jobInstance: any; + ranges: string; } export default function AnnotationTopBarComponent(props: Props): JSX.Element { @@ -77,6 +78,7 @@ export default function AnnotationTopBarComponent(props: Props): JSX.Element { undoAction, redoAction, playing, + ranges, frameNumber, frameFilename, frameDeleted, @@ -168,6 +170,7 @@ export default function AnnotationTopBarComponent(props: Props): JSX.Element { startFrame={startFrame} stopFrame={stopFrame} playing={playing} + ranges={ranges} frameNumber={frameNumber} frameFilename={frameFilename} frameDeleted={frameDeleted} diff --git a/cvat-ui/src/containers/annotation-page/top-bar/top-bar.tsx b/cvat-ui/src/containers/annotation-page/top-bar/top-bar.tsx index f53c5f61cdb..067d85fe1ad 100644 --- a/cvat-ui/src/containers/annotation-page/top-bar/top-bar.tsx +++ b/cvat-ui/src/containers/annotation-page/top-bar/top-bar.tsx @@ -65,6 +65,7 @@ interface StateToProps { normalizedKeyMap: Record; canvasInstance: Canvas | Canvas3d; forceExit: boolean; + ranges: string; activeControl: ActiveControl; } @@ -91,6 +92,7 @@ function mapStateToProps(state: CombinedState): StateToProps { annotation: { player: { playing, + ranges, frame: 
{ data: { deleted: frameIsDeleted }, filename: frameFilename, @@ -142,6 +144,7 @@ function mapStateToProps(state: CombinedState): StateToProps { canvasInstance, forceExit, activeControl, + ranges, }; } @@ -638,6 +641,7 @@ class AnnotationTopBarContainer extends React.PureComponent { workspace, canvasIsReady, keyMap, + ranges, normalizedKeyMap, activeControl, searchAnnotations, @@ -766,6 +770,7 @@ class AnnotationTopBarContainer extends React.PureComponent { workspace={workspace} playing={playing} saving={saving} + ranges={ranges} startFrame={startFrame} stopFrame={stopFrame} frameNumber={frameNumber} diff --git a/cvat-ui/src/reducers/annotation-reducer.ts b/cvat-ui/src/reducers/annotation-reducer.ts index 71524879f8b..d7d395adb6f 100644 --- a/cvat-ui/src/reducers/annotation-reducer.ts +++ b/cvat-ui/src/reducers/annotation-reducer.ts @@ -55,6 +55,7 @@ const defaultState: AnnotationState = { job: { openTime: null, labels: [], + groundTruthJobFramesMeta: null, requestedId: null, groundTruthJobId: null, instance: null, @@ -72,6 +73,7 @@ const defaultState: AnnotationState = { delay: 0, changeTime: null, }, + ranges: '', playing: false, frameAngles: [], navigationBlocked: false, @@ -417,8 +419,13 @@ export default (state = defaultState, action: AnyAction): AnnotationState => { }; } case AnnotationActionTypes.CONFIRM_CANVAS_READY: { + const { ranges } = action.payload; return { ...state, + player: { + ...state.player, + ranges: ranges || state.player.ranges, + }, canvas: { ...state.canvas, ready: true, diff --git a/cvat-ui/src/reducers/index.ts b/cvat-ui/src/reducers/index.ts index f29b7dff8f9..2207d73b807 100644 --- a/cvat-ui/src/reducers/index.ts +++ b/cvat-ui/src/reducers/index.ts @@ -695,6 +695,7 @@ export interface AnnotationState { delay: number; changeTime: number | null; }; + ranges: string; navigationBlocked: boolean; playing: boolean; frameAngles: number[]; From 84fd7819b0071b44119fb229f7d4d3f83c9fa68a Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 11 
Aug 2023 11:36:36 +0300 Subject: [PATCH 25/32] Quick fix for navigation in jobs with one image (#6655) ### Motivation and context ### How has this been tested? ### Checklist - [x] I submit my changes into the `develop` branch - [ ] I have added a description of my changes into the [CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md) file - [ ] I have updated the documentation accordingly - [ ] I have added tests to cover my changes - [ ] I have linked related issues (see [GitHub docs]( https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword)) - [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning)) ### License - [x] I submit _my code changes_ under the same [MIT License]( https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project. Feel free to contact the maintainers if that's a concern. 
--- .../annotation-page/top-bar/player-navigation.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx b/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx index 931693690be..4a05b4444a1 100644 --- a/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx +++ b/cvat-ui/src/components/annotation-page/top-bar/player-navigation.tsx @@ -112,7 +112,11 @@ function PlayerNavigation(props: Props): JSX.Element { {ranges.split(';').map((range) => { const [start, end] = range.split(':').map((num) => +num); const adjustedStart = Math.max(0, start - 1); - const totalSegments = stopFrame - startFrame; + let totalSegments = stopFrame - startFrame; + if (totalSegments === 0) { + // corner case for jobs with one image + totalSegments = 1; + } const segmentWidth = 1000 / totalSegments; const width = Math.max((end - adjustedStart), 1) * segmentWidth; const offset = (Math.max((adjustedStart - startFrame), 0) / totalSegments) * 1000; From d5cbfae99bccb41097b1dae33ac181d04daa68dc Mon Sep 17 00:00:00 2001 From: Nikita Manovich Date: Fri, 11 Aug 2023 13:53:45 +0300 Subject: [PATCH 26/32] [Snyk] Security upgrade certifi from 2023.5.7 to 2023.7.22 (#6565) Co-authored-by: snyk-bot From 096870e3e34287177b69dbe214af79036d8b6b95 Mon Sep 17 00:00:00 2001 From: Andrey Zhavoronkov Date: Fri, 11 Aug 2023 13:56:33 +0300 Subject: [PATCH 27/32] [Snyk] Security upgrade cryptography from 41.0.0 to 41.0.2 (#6498) Co-authored-by: snyk-bot From b0b8e492eb8ab5123a1691e92cb1769c5ea79695 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Fri, 11 Aug 2023 14:21:57 +0300 Subject: [PATCH 28/32] SDK: Add predefined functions based on torchvision (#6649) These serve as a replacement for YOLOv8n that was removed in #6632. To support these functions, I also add an ability to define parameterized functions for use with the CLI. 
--- CHANGELOG.md | 3 +- cvat-cli/src/cvat_cli/cli.py | 10 +- cvat-cli/src/cvat_cli/parser.py | 45 +++++ .../functions/torchvision_detection.py | 41 +++++ .../torchvision_keypoint_detection.py | 59 +++++++ site/content/en/docs/api_sdk/cli/_index.md | 51 +++++- .../en/docs/api_sdk/sdk/auto-annotation.md | 54 ++++++ .../cli/example_parameterized_function.py | 32 ++++ tests/python/cli/test_cli.py | 17 ++ tests/python/sdk/test_auto_annotation.py | 159 ++++++++++++++++++ 10 files changed, 460 insertions(+), 11 deletions(-) create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_detection.py create mode 100644 cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_keypoint_detection.py create mode 100644 tests/python/cli/example_parameterized_function.py diff --git a/CHANGELOG.md b/CHANGELOG.md index ba25166343e..cecc6f5074c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,7 +15,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 functionality for automatically annotating a task by running a user-provided function on the local machine, and a corresponding CLI command (`auto-annotate`) - () + (, + ) - Cached frames indication on the interface () ### Changed diff --git a/cvat-cli/src/cvat_cli/cli.py b/cvat-cli/src/cvat_cli/cli.py index d0417944aa6..114e5bed894 100644 --- a/cvat-cli/src/cvat_cli/cli.py +++ b/cvat-cli/src/cvat_cli/cli.py @@ -8,7 +8,7 @@ import importlib.util import json from pathlib import Path -from typing import Dict, List, Optional, Sequence, Tuple +from typing import Any, Dict, List, Optional, Sequence, Tuple import cvat_sdk.auto_annotation as cvataa from cvat_sdk import Client, models @@ -151,6 +151,7 @@ def tasks_auto_annotate( *, function_module: Optional[str] = None, function_file: Optional[Path] = None, + function_parameters: Dict[str, Any], clear_existing: bool = False, allow_unmatched_labels: bool = False, ) -> None: @@ -163,6 +164,13 @@ def tasks_auto_annotate( else: assert False, "function 
identification arguments missing" + if hasattr(function, "create"): + # this is actually a function factory + function = function.create(**function_parameters) + else: + if function_parameters: + raise TypeError("function takes no parameters") + cvataa.annotate_task( self.client, task_id, diff --git a/cvat-cli/src/cvat_cli/parser.py b/cvat-cli/src/cvat_cli/parser.py index c1a7e6c3abd..f03a52f9b41 100644 --- a/cvat-cli/src/cvat_cli/parser.py +++ b/cvat-cli/src/cvat_cli/parser.py @@ -11,6 +11,7 @@ import textwrap from distutils.util import strtobool from pathlib import Path +from typing import Any, Tuple from cvat_sdk.core.proxies.tasks import ResourceType @@ -41,6 +42,40 @@ def parse_resource_type(s: str) -> ResourceType: return s +def parse_function_parameter(s: str) -> Tuple[str, Any]: + key, sep, type_and_value = s.partition("=") + + if not sep: + raise argparse.ArgumentTypeError("parameter value not specified") + + type_, sep, value = type_and_value.partition(":") + + if not sep: + raise argparse.ArgumentTypeError("parameter type not specified") + + if type_ == "int": + value = int(value) + elif type_ == "float": + value = float(value) + elif type_ == "str": + pass + elif type_ == "bool": + value = bool(strtobool(value)) + else: + raise argparse.ArgumentTypeError(f"unsupported parameter type {type_!r}") + + return (key, value) + + +class BuildDictAction(argparse.Action): + def __init__(self, option_strings, dest, default=None, **kwargs): + super().__init__(option_strings, dest, default=default or {}, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + key, value = values + getattr(namespace, self.dest)[key] = value + + def make_cmdline_parser() -> argparse.ArgumentParser: ####################################################################### # Command line interface definition @@ -394,6 +429,16 @@ def make_cmdline_parser() -> argparse.ArgumentParser: help="path to a Python source file to use as the function", ) + 
auto_annotate_task_parser.add_argument( + "--function-parameter", + "-p", + metavar="NAME=TYPE:VALUE", + type=parse_function_parameter, + action=BuildDictAction, + dest="function_parameters", + help="parameter for the function", + ) + auto_annotate_task_parser.add_argument( "--clear-existing", action="store_true", help="Remove existing annotations from the task" ) diff --git a/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_detection.py b/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_detection.py new file mode 100644 index 00000000000..57457d74225 --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_detection.py @@ -0,0 +1,41 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from functools import cached_property +from typing import List + +import PIL.Image +import torchvision.models + +import cvat_sdk.auto_annotation as cvataa +import cvat_sdk.models as models + + +class _TorchvisionDetectionFunction: + def __init__(self, model_name: str, weights_name: str = "DEFAULT", **kwargs) -> None: + weights_enum = torchvision.models.get_model_weights(model_name) + self._weights = weights_enum[weights_name] + self._transforms = self._weights.transforms() + self._model = torchvision.models.get_model(model_name, weights=self._weights, **kwargs) + self._model.eval() + + @cached_property + def spec(self) -> cvataa.DetectionFunctionSpec: + return cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec(cat, i) for i, cat in enumerate(self._weights.meta["categories"]) + ] + ) + + def detect(self, context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + results = self._model([self._transforms(image)]) + + return [ + cvataa.rectangle(label.item(), [x.item() for x in box]) + for result in results + for box, label in zip(result["boxes"], result["labels"]) + ] + + +create = _TorchvisionDetectionFunction diff --git a/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_keypoint_detection.py 
b/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_keypoint_detection.py new file mode 100644 index 00000000000..b4eb47d476d --- /dev/null +++ b/cvat-sdk/cvat_sdk/auto_annotation/functions/torchvision_keypoint_detection.py @@ -0,0 +1,59 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from functools import cached_property +from typing import List + +import PIL.Image +import torchvision.models + +import cvat_sdk.auto_annotation as cvataa +import cvat_sdk.models as models + + +class _TorchvisionKeypointDetectionFunction: + def __init__(self, model_name: str, weights_name: str = "DEFAULT", **kwargs) -> None: + weights_enum = torchvision.models.get_model_weights(model_name) + self._weights = weights_enum[weights_name] + self._transforms = self._weights.transforms() + self._model = torchvision.models.get_model(model_name, weights=self._weights, **kwargs) + self._model.eval() + + @cached_property + def spec(self) -> cvataa.DetectionFunctionSpec: + return cvataa.DetectionFunctionSpec( + labels=[ + cvataa.skeleton_label_spec( + cat, + i, + [ + cvataa.keypoint_spec(name, j) + for j, name in enumerate(self._weights.meta["keypoint_names"]) + ], + ) + for i, cat in enumerate(self._weights.meta["categories"]) + ] + ) + + def detect(self, context, image: PIL.Image.Image) -> List[models.LabeledShapeRequest]: + results = self._model([self._transforms(image)]) + + return [ + cvataa.skeleton( + label.item(), + elements=[ + cvataa.keypoint( + keypoint_id, + [keypoint[0].item(), keypoint[1].item()], + occluded=not keypoint[2].item(), + ) + for keypoint_id, keypoint in enumerate(keypoints) + ], + ) + for result in results + for keypoints, label in zip(result["keypoints"], result["labels"]) + ] + + +create = _TorchvisionKeypointDetectionFunction diff --git a/site/content/en/docs/api_sdk/cli/_index.md b/site/content/en/docs/api_sdk/cli/_index.md index 1067d934a14..bca0a070264 100644 --- a/site/content/en/docs/api_sdk/cli/_index.md +++ 
b/site/content/en/docs/api_sdk/cli/_index.md @@ -235,19 +235,52 @@ by using the [label constructor](/docs/manual/basics/creating_an_annotation_task This command provides a command-line interface to the [auto-annotation API](/docs/api_sdk/sdk/auto-annotation). -To use it, create a Python module that implements the AA function protocol. -In other words, this module must define the required attributes on the module level. -For example: +It can auto-annotate using AA functions implemented in one of the following ways: -```python -import cvat_sdk.auto_annotation as cvataa +1. As a Python module directly implementing the AA function protocol. + Such a module must define the required attributes at the module level. -spec = cvataa.DetectionFunctionSpec(...) + For example: -def detect(context, image): - ... -``` + ```python + import cvat_sdk.auto_annotation as cvataa + + spec = cvataa.DetectionFunctionSpec(...) + + def detect(context, image): + ... + ``` + +1. As a Python module implementing a factory function named `create`. + This function must return an object implementing the AA function protocol. + Any parameters specified on the command line using the `-p` option + will be passed to `create`. + + For example: + + ```python + import cvat_sdk.auto_annotation as cvataa + + class _MyFunction: + def __init__(...): + ... + + spec = cvataa.DetectionFunctionSpec(...) + + def detect(context, image): + ... + + def create(...) -> cvataa.DetectionFunction: + return _MyFunction(...) 
+ ``` + +- Annotate the task with id 137 with the predefined torchvision detection function, + which is parameterized: + ```bash + cvat-cli auto-annotate 137 --function-module cvat_sdk.auto_annotation.functions.torchvision_detection \ + -p model_name=str:fasterrcnn_resnet50_fpn_v2 -p box_score_thresh=float:0.5 + ``` - Annotate the task with id 138 with an AA function defined in `my_func.py`: ```bash diff --git a/site/content/en/docs/api_sdk/sdk/auto-annotation.md b/site/content/en/docs/api_sdk/sdk/auto-annotation.md index d6281f7e168..b85ab7b067b 100644 --- a/site/content/en/docs/api_sdk/sdk/auto-annotation.md +++ b/site/content/en/docs/api_sdk/sdk/auto-annotation.md @@ -197,3 +197,57 @@ Same logic applies to sub-label IDs. `annotate_task` will raise a `BadFunctionError` exception if it detects that the function violated the AA function protocol. + +## Predefined AA functions + +This layer includes several predefined AA functions. +You can use them as-is, or as a base on which to build your own. + +Each function is implemented as a module +to allow usage via the CLI `auto-annotate` command. +Therefore, in order to use it from the SDK, +you'll need to import the corresponding module. + +### `cvat_sdk.auto_annotation.functions.torchvision_detection` + +This AA function uses object detection models from +the [torchvision](https://pytorch.org/vision/stable/index.html) library. +It produces rectangle annotations. + +To use it, install CVAT SDK with the `pytorch` extra: + +``` +$ pip install "cvat-sdk[pytorch]" +``` + +Usage from Python: + +```python +from cvat_sdk.auto_annotation.functions.torchvision_detection import create as create_torchvision +annotate_task(, , create_torchvision(, ...)) +``` + +Usage from the CLI: + +```bash +cvat-cli auto-annotate "" --function-module cvat_sdk.auto_annotation.functions.torchvision_detection \ + -p model_name=str:"" ... 
+``` + +The `create` function accepts the following parameters: + +- `model_name` (`str`) - the name of the model, such as `fasterrcnn_resnet50_fpn_v2`. + This parameter is required. +- `weights_name` (`str`) - the name of a weights enum value for the model, such as `COCO_V1`. + Defaults to `DEFAULT`. + +It also accepts arbitrary additional parameters, +which are passed directly to the model constructor. + +### `cvat_sdk.auto_annotation.functions.torchvision_keypoint_detection` + +This AA function is analogous to `torchvision_detection`, +except it uses torchvision's keypoint detection models and produces skeleton annotations. +Keypoints which the model marks as invisible will be marked as occluded in CVAT. + +Refer to the previous section for usage instructions and parameter information. diff --git a/tests/python/cli/example_parameterized_function.py b/tests/python/cli/example_parameterized_function.py new file mode 100644 index 00000000000..29d9038e78b --- /dev/null +++ b/tests/python/cli/example_parameterized_function.py @@ -0,0 +1,32 @@ +# Copyright (C) 2023 CVAT.ai Corporation +# +# SPDX-License-Identifier: MIT + +from types import SimpleNamespace as namespace +from typing import List + +import cvat_sdk.auto_annotation as cvataa +import cvat_sdk.models as models +import PIL.Image + + +def create(s: str, i: int, f: float, b: bool) -> cvataa.DetectionFunction: + assert s == "string" + assert i == 123 + assert f == 5.5 + assert b is False + + spec = cvataa.DetectionFunctionSpec( + labels=[ + cvataa.label_spec("car", 0), + ], + ) + + def detect( + context: cvataa.DetectionFunctionContext, image: PIL.Image.Image + ) -> List[models.LabeledShapeRequest]: + return [ + cvataa.rectangle(0, [1, 2, 3, 4]), + ] + + return namespace(spec=spec, detect=detect) diff --git a/tests/python/cli/test_cli.py b/tests/python/cli/test_cli.py index fbb6f73fe5f..66749f992aa 100644 --- a/tests/python/cli/test_cli.py +++ b/tests/python/cli/test_cli.py @@ -328,3 +328,20 @@ def 
test_auto_annotate_with_file(self, fxt_new_task: Task): annotations = fxt_new_task.get_annotations() assert annotations.shapes + + def test_auto_annotate_with_parameters(self, fxt_new_task: Task): + annotations = fxt_new_task.get_annotations() + assert not annotations.shapes + + self.run_cli( + "auto-annotate", + str(fxt_new_task.id), + f"--function-module={__package__}.example_parameterized_function", + "-ps=str:string", + "-pi=int:123", + "-pf=float:5.5", + "-pb=bool:false", + ) + + annotations = fxt_new_task.get_annotations() + assert annotations.shapes diff --git a/tests/python/sdk/test_auto_annotation.py b/tests/python/sdk/test_auto_annotation.py index c41b655d6c5..142c4354c4d 100644 --- a/tests/python/sdk/test_auto_annotation.py +++ b/tests/python/sdk/test_auto_annotation.py @@ -18,6 +18,11 @@ from .util import make_pbar +try: + import torchvision.models as torchvision_models +except ModuleNotFoundError: + torchvision_models = None + @pytest.fixture(autouse=True) def _common_setup( @@ -553,3 +558,157 @@ def test_non_skeleton_with_elements(self): ], "non-skeleton shape with elements", ) + + +if torchvision_models is not None: + import torch + import torch.nn as nn + + class FakeTorchvisionDetector(nn.Module): + def __init__(self, label_id: int) -> None: + super().__init__() + self._label_id = label_id + + def forward(self, images: List[torch.Tensor]) -> List[dict]: + assert isinstance(images, list) + assert all(isinstance(t, torch.Tensor) for t in images) + + return [ + { + "boxes": torch.tensor([[1, 2, 3, 4]]), + "labels": torch.tensor([self._label_id]), + } + ] + + def fake_get_detection_model(name: str, weights, test_param): + assert test_param == "expected_value" + + car_label_id = weights.meta["categories"].index("car") + + return FakeTorchvisionDetector(label_id=car_label_id) + + class FakeTorchvisionKeypointDetector(nn.Module): + def __init__(self, label_id: int, keypoint_names: List[str]) -> None: + super().__init__() + self._label_id = label_id + 
self._keypoint_names = keypoint_names + + def forward(self, images: List[torch.Tensor]) -> List[dict]: + assert isinstance(images, list) + assert all(isinstance(t, torch.Tensor) for t in images) + + return [ + { + "labels": torch.tensor([self._label_id]), + "keypoints": torch.tensor( + [ + [ + [hash(name) % 100, 0, 1 if name.startswith("right_") else 0] + for i, name in enumerate(self._keypoint_names) + ] + ] + ), + } + ] + + def fake_get_keypoint_detection_model(name: str, weights, test_param): + assert test_param == "expected_value" + + person_label_id = weights.meta["categories"].index("person") + + return FakeTorchvisionKeypointDetector( + label_id=person_label_id, keypoint_names=weights.meta["keypoint_names"] + ) + + +@pytest.mark.skipif(torchvision_models is None, reason="torchvision is not installed") +class TestAutoAnnotationFunctions: + @pytest.fixture(autouse=True) + def setup( + self, + tmp_path: Path, + fxt_login: Tuple[Client, str], + ): + self.client = fxt_login[0] + self.image = generate_image_file("1.png", size=(100, 100)) + + image_dir = tmp_path / "images" + image_dir.mkdir() + + image_path = image_dir / self.image.name + image_path.write_bytes(self.image.getbuffer()) + + self.task = self.client.tasks.create_from_data( + models.TaskWriteRequest( + "Auto-annotation test task", + labels=[ + models.PatchedLabelRequest( + name="person", + type="skeleton", + sublabels=[ + models.SublabelRequest(name="left_eye"), + models.SublabelRequest(name="right_eye"), + ], + ), + models.PatchedLabelRequest(name="car"), + ], + ), + resources=[image_path], + ) + + task_labels = self.task.get_labels() + self.task_labels_by_id = {label.id: label for label in task_labels} + + person_label = next(label for label in task_labels if label.name == "person") + self.person_sublabels_by_id = {sl.id: sl for sl in person_label.sublabels} + + def test_torchvision_detection(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(torchvision_models, "get_model", 
fake_get_detection_model) + + import cvat_sdk.auto_annotation.functions.torchvision_detection as td + + cvataa.annotate_task( + self.client, + self.task.id, + td.create("fasterrcnn_resnet50_fpn_v2", "COCO_V1", test_param="expected_value"), + allow_unmatched_labels=True, + ) + + annotations = self.task.get_annotations() + + assert len(annotations.shapes) == 1 + assert self.task_labels_by_id[annotations.shapes[0].label_id].name == "car" + assert annotations.shapes[0].type.value == "rectangle" + assert annotations.shapes[0].points == [1, 2, 3, 4] + + def test_torchvision_keypoint_detection(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(torchvision_models, "get_model", fake_get_keypoint_detection_model) + + import cvat_sdk.auto_annotation.functions.torchvision_keypoint_detection as tkd + + cvataa.annotate_task( + self.client, + self.task.id, + tkd.create("keypointrcnn_resnet50_fpn", "COCO_V1", test_param="expected_value"), + allow_unmatched_labels=True, + ) + + annotations = self.task.get_annotations() + + assert len(annotations.shapes) == 1 + assert self.task_labels_by_id[annotations.shapes[0].label_id].name == "person" + assert annotations.shapes[0].type.value == "skeleton" + assert len(annotations.shapes[0].elements) == 2 + + elements = sorted( + annotations.shapes[0].elements, + key=lambda e: self.person_sublabels_by_id[e.label_id].name, + ) + + assert self.person_sublabels_by_id[elements[0].label_id].name == "left_eye" + assert elements[0].points[0] == hash("left_eye") % 100 + assert elements[0].occluded + + assert self.person_sublabels_by_id[elements[1].label_id].name == "right_eye" + assert elements[1].points[0] == hash("right_eye") % 100 + assert not elements[1].occluded From d61a7b1c2233aba3e1355918674806dcd04203e3 Mon Sep 17 00:00:00 2001 From: Mariia Acoca <39969264+mdacoca@users.noreply.github.com> Date: Fri, 11 Aug 2023 15:47:04 +0200 Subject: [PATCH 29/32] release notes update (#6656) --- CHANGELOG.md | 34 +++++++++++++++++++--------------- 
1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cecc6f5074c..da0a41feee0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,27 +6,31 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## \[Unreleased] + ### Added -- \[SDK\] A `DeferredTqdmProgressReporter` class, which doesn't have glitchy output - like `TqdmProgressReporter` in certain circumstances +- \[SDK\] Introduced the `DeferredTqdmProgressReporter` class, + which avoids the glitchy output seen with the `TqdmProgressReporter` under certain circumstances () -- \[SDK, CLI\] A `cvat_sdk.auto_annotation` module that provides - functionality for automatically annotating a task by running a - user-provided function on the local machine, and a corresponding CLI command - (`auto-annotate`) +- \[SDK, CLI\] Added the `cvat_sdk.auto_annotation` + module, providing functionality to automatically annotate tasks + by executing a user-provided function on the local machine. + A corresponding CLI command (`auto-annotate`) is also available. + Some predefined functions using torchvision are also available. (, ) -- Cached frames indication on the interface () +- Included an indication for cached frames in the interface + () ### Changed -- Increased default guide assets limitations (30 assets, up to 10Mb each) +- Raised the default guide assets limitations to 30 assets, + with a maximum size of 10MB each () - \[SDK\] Custom `ProgressReporter` implementations should now override `start2` instead of `start` + The old implementation is still supported. 
() -- Optimized memory using & code in decoding module () - +- Improved memory optimization and code in the decoding module () ### Deprecated @@ -34,14 +38,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Removed -- The YOLOv5 serverless function +- Removed the YOLOv5 serverless function () ### Fixed -- Accidentally using prebuilt FFmpeg bundled in PyAV instead of the custom - build. -- Fix filename for label in CamVid format () +- Corrected an issue where the prebuilt FFmpeg bundled in PyAV + was being used instead of the custom build. +- Fixed the filename for labels in the CamVid format () ### Security @@ -84,7 +88,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 () - \[API\] Fixed API issue related to file downloading failures for filenames with special characters () - \[Helm\] In Helm, we've resolved an issue with multiple caches - in the same RWX volume, which was preventing db migration from starting () + in the same RWX volume, which was preventing db migration from starting () ## \[2.5.1\] - 2023-07-19 From 250c08c86a42d076d2e507f4042d16e715c0dc86 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Fri, 11 Aug 2023 16:54:42 +0300 Subject: [PATCH 30/32] Bump version to 2.6.0 (final) --- cvat-cli/src/cvat_cli/version.py | 2 +- cvat-sdk/gen/generate.sh | 2 +- cvat/__init__.py | 2 +- docker-compose.yml | 18 +++++++++--------- helm-chart/values.yaml | 4 ++-- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/cvat-cli/src/cvat_cli/version.py b/cvat-cli/src/cvat_cli/version.py index 43921c2d67c..840688e3f56 100644 --- a/cvat-cli/src/cvat_cli/version.py +++ b/cvat-cli/src/cvat_cli/version.py @@ -1 +1 @@ -VERSION = "2.5.2" +VERSION = "2.6.0" diff --git a/cvat-sdk/gen/generate.sh b/cvat-sdk/gen/generate.sh index 5bde94e68b0..0f1b1f997fd 100755 --- a/cvat-sdk/gen/generate.sh +++ b/cvat-sdk/gen/generate.sh @@ -8,7 +8,7 @@ set -e GENERATOR_VERSION="v6.0.1" -VERSION="2.5.2" 
+VERSION="2.6.0" LIB_NAME="cvat_sdk" LAYER1_LIB_NAME="${LIB_NAME}/api_client" DST_DIR="$(cd "$(dirname -- "$0")/.." && pwd)" diff --git a/cvat/__init__.py b/cvat/__init__.py index d227ef70902..43f1aba73a8 100644 --- a/cvat/__init__.py +++ b/cvat/__init__.py @@ -4,6 +4,6 @@ from cvat.utils.version import get_version -VERSION = (2, 6, 0, 'alpha', 0) +VERSION = (2, 6, 0, 'final', 0) __version__ = get_version(VERSION) diff --git a/docker-compose.yml b/docker-compose.yml index 5ea0ff9f76f..bd9d6db66c4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,7 +25,7 @@ services: cvat_server: container_name: cvat_server - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -64,7 +64,7 @@ services: cvat_utils: container_name: cvat_utils - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -89,7 +89,7 @@ services: cvat_worker_import: container_name: cvat_worker_import - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -112,7 +112,7 @@ services: cvat_worker_export: container_name: cvat_worker_export - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -135,7 +135,7 @@ services: cvat_worker_annotation: container_name: cvat_worker_annotation - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -158,7 +158,7 @@ services: cvat_worker_webhooks: container_name: cvat_worker_webhooks - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -182,7 +182,7 @@ services: cvat_worker_quality_reports: container_name: cvat_worker_quality_reports - image: cvat/server:${CVAT_VERSION:-dev} + image: 
cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -204,7 +204,7 @@ services: cvat_worker_analytics_reports: container_name: cvat_worker_analytics_reports - image: cvat/server:${CVAT_VERSION:-dev} + image: cvat/server:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_redis @@ -227,7 +227,7 @@ services: cvat_ui: container_name: cvat_ui - image: cvat/ui:${CVAT_VERSION:-dev} + image: cvat/ui:${CVAT_VERSION:-v2.6.0} restart: always depends_on: - cvat_server diff --git a/helm-chart/values.yaml b/helm-chart/values.yaml index 5ebee91dfd6..0338799e72e 100644 --- a/helm-chart/values.yaml +++ b/helm-chart/values.yaml @@ -95,7 +95,7 @@ cvat: additionalVolumeMounts: [] replicas: 1 image: cvat/server - tag: dev + tag: v2.6.0 imagePullPolicy: Always permissionFix: enabled: true @@ -119,7 +119,7 @@ cvat: frontend: replicas: 1 image: cvat/ui - tag: dev + tag: v2.6.0 imagePullPolicy: Always labels: {} # test: test From efb3cd67e9ce7e7148b75d93e009afbdb83c3d17 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Fri, 11 Aug 2023 16:57:35 +0300 Subject: [PATCH 31/32] Clean up the changelog for release --- CHANGELOG.md | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index da0a41feee0..497654b8876 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## \[Unreleased] +## \[2.6.0\] - 2023-08-11 ### Added @@ -32,10 +32,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 () - Improved memory optimization and code in the decoding module () -### Deprecated - -- TDB - ### Removed - Removed the YOLOv5 serverless function @@ -47,10 +43,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 was being used instead of the custom build. - Fixed the filename for labels in the CamVid format () -### Security - -- TDB - ## \[2.5.2\] - 2023-07-27 ### Added From f416df19706a898154aadc10482164a4ee210b5d Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Fri, 11 Aug 2023 16:54:42 +0300 Subject: [PATCH 32/32] Update cvat-cli SDK dependency version --- cvat-cli/requirements/base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cvat-cli/requirements/base.txt b/cvat-cli/requirements/base.txt index 612076fc5c7..923c04ae2ae 100644 --- a/cvat-cli/requirements/base.txt +++ b/cvat-cli/requirements/base.txt @@ -1,3 +1,3 @@ -cvat-sdk~=2.5.0 +cvat-sdk~=2.6.0 Pillow>=6.2.0 setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability