diff --git a/api/openapi.json b/api/openapi.json
index f96c60ca..de50157c 100644
--- a/api/openapi.json
+++ b/api/openapi.json
@@ -3,7 +3,7 @@
   "info": {
     "title": "osparc.io web API (dev)",
     "description": "osparc-simcore public API specifications",
-    "version": "0.5.1-dev"
+    "version": "0.6.0-dev"
   },
   "paths": {
     "/v0/meta": {
@@ -2282,7 +2282,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -2449,7 +2449,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -2588,7 +2588,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -2718,7 +2718,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -2801,7 +2801,7 @@
           "solvers"
         ],
         "summary": "Start Job",
-        "description": "Starts job job_id created with the solver solver_key:version\n\nNew in *version 0.4.3*: cluster_id",
+        "description": "Starts job job_id created with the solver solver_key:version\n\nNew in *version 0.4.3*: cluster_id\nNew in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation",
         "operationId": "start_job",
         "parameters": [
           {
@@ -2846,7 +2846,7 @@
           }
         ],
         "responses": {
-          "200": {
+          "202": {
             "description": "Successful Response",
             "content": {
               "application/json": {
@@ -2867,7 +2867,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -2926,12 +2926,32 @@
               }
             }
           },
+          "200": {
+            "description": "Job already started",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/JobStatus"
+                }
+              }
+            }
+          },
+          "406": {
+            "description": "Cluster not found",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/ErrorGet"
+                }
+              }
+            }
+          },
           "422": {
-            "description": "Validation Error",
+            "description": "Configuration error",
             "content": {
               "application/json": {
                 "schema": {
-                  "$ref": "#/components/schemas/HTTPValidationError"
+                  "$ref": "#/components/schemas/ErrorGet"
                 }
               }
             }
@@ -3005,7 +3025,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -3143,7 +3163,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -3561,7 +3581,7 @@
             }
           },
           "404": {
-            "description": "Job not found",
+            "description": "Job/wallet/pricing details not found",
             "content": {
               "application/json": {
                 "schema": {
@@ -4837,6 +4857,7 @@
           "studies"
         ],
         "summary": "Start Study Job",
+        "description": "New in *version 0.6.0*: This endpoint responds with a 202 when successfully starting a computation",
         "operationId": "start_study_job",
         "parameters": [
           {
@@ -4871,7 +4892,7 @@
           }
         ],
         "responses": {
-          "200": {
+          "202": {
             "description": "Successful Response",
             "content": {
               "application/json": {
@@ -4881,12 +4902,102 @@
               }
             }
           },
+          "402": {
+            "description": "Payment required",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/ErrorGet"
+                }
+              }
+            }
+          },
+          "404": {
+            "description": "Job/wallet/pricing details not found",
+            "content": {
"application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "200": { + "description": "Job already started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JobStatus" + } + } + } + }, + "406": { + "description": "Cluster not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { - "description": "Validation Error", + "description": "Configuration error", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/HTTPValidationError" + "$ref": "#/components/schemas/ErrorGet" } } } @@ -5073,12 +5184,12 @@ ] } }, - "/v0/studies/{study_id}/jobs/{job_id}/outputs/logfile": { - "post": { + "/v0/studies/{study_id}/jobs/{job_id}/outputs/log-links": { + "get": { "tags": [ "studies" ], - "summary": "Get Study Job Output Logfile", + "summary": "Get download links for study job log files", "operationId": "get_study_job_output_logfile", "parameters": [ { @@ -5103,8 +5214,15 @@ } ], "responses": { - "501": { - "description": "Successful Response" + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JobLogsMap" + } + } + } }, "422": { "description": "Validation Error", @@ -5116,7 +5234,12 @@ } } } - } + }, + "security": [ + { + "HTTPBasic": [] + } + ] } }, "/v0/studies/{study_id}/jobs/{job_id}/metadata": { @@ -5457,6 +5580,32 @@ } ] } + }, + "/v0/credits/price": { + "get": { + "tags": [ + "credits" + ], + "summary": "Get Credits Price", + "operationId": "get_credits_price", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetCreditPrice" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } } }, "components": { @@ -5658,6 +5807,33 @@ ], "title": "FileUploadData" }, + "GetCreditPrice": { + "properties": { + "productName": { + "type": "string", + "title": "Productname" + }, + "usdPerCredit": { + "type": "number", + "minimum": 0.0, + "title": "Usdpercredit", + "description": "Price of a credit in USD. 
+            "description": "Price of a credit in USD. If None, then this product's price is UNDEFINED"
+          },
+          "minPaymentAmountUsd": {
+            "type": "integer",
+            "minimum": 0,
+            "title": "Minpaymentamountusd",
+            "description": "Minimum amount (included) in USD that can be paid for this product. Can be None if this product's price is UNDEFINED"
+          }
+        },
+        "type": "object",
+        "required": [
+          "productName",
+          "usdPerCredit",
+          "minPaymentAmountUsd"
+        ],
+        "title": "GetCreditPrice"
+      },
       "Groups": {
         "properties": {
           "me": {
@@ -5820,6 +5996,23 @@
           }
         }
       },
+      "JobLogsMap": {
+        "properties": {
+          "log_links": {
+            "items": {
+              "$ref": "#/components/schemas/LogLink"
+            },
+            "type": "array",
+            "title": "Log Links",
+            "description": "Array of download links"
+          }
+        },
+        "type": "object",
+        "required": [
+          "log_links"
+        ],
+        "title": "JobLogsMap"
+      },
       "JobMetadata": {
         "properties": {
           "job_id": {
@@ -6031,6 +6224,27 @@
         "type": "object",
         "title": "Links"
       },
+      "LogLink": {
+        "properties": {
+          "node_name": {
+            "type": "string",
+            "title": "Node Name"
+          },
+          "download_link": {
+            "type": "string",
+            "maxLength": 65536,
+            "minLength": 1,
+            "format": "uri",
+            "title": "Download Link"
+          }
+        },
+        "type": "object",
+        "required": [
+          "node_name",
+          "download_link"
+        ],
+        "title": "LogLink"
+      },
       "Meta": {
         "properties": {
           "name": {
diff --git a/clients/python/client/osparc/_files_api.py b/clients/python/client/osparc/_files_api.py
index 7bc9a9f6..0d35e596 100644
--- a/clients/python/client/osparc/_files_api.py
+++ b/clients/python/client/osparc/_files_api.py
@@ -119,7 +119,7 @@ async def upload_file_async(
             configuration=self.api_client.configuration, timeout=timeout_seconds
         ) as session:
             with logging_redirect_tqdm():
-                _logger.info("Uploading %s in %i chunks", file.name, n_urls)
+                _logger.info("Uploading %s in %i chunk(s)", file.name, n_urls)
                 async for chunck, size in tqdm(
                     file_chunk_generator(file, chunk_size),
                     total=n_urls,
@@ -206,7 +206,7 @@ def _search_files(
         sha256_checksum: Optional[str] = None,
         timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS,
     ) -> PaginationGenerator:
-        def pagination_method():
+        def _pagination_method():
             return super(FilesApi, self).search_files_page(
                 file_id=file_id,
                 sha256_checksum=sha256_checksum,
@@ -214,7 +214,7 @@
             )
 
         return PaginationGenerator(
-            first_page_callback=pagination_method,
+            first_page_callback=_pagination_method,
             api_client=self.api_client,
             base_url=self.api_client.configuration.host,
             auth=self._auth,
diff --git a/clients/python/client/osparc/_http_client.py b/clients/python/client/osparc/_http_client.py
index 98b8c6a6..19c3e901 100644
--- a/clients/python/client/osparc/_http_client.py
+++ b/clients/python/client/osparc/_http_client.py
@@ -88,6 +88,9 @@ async def delete(self, *args, **kwargs) -> httpx.Response:
     async def patch(self, *args, **kwargs) -> httpx.Response:
         return await self._request(self._client.patch, *args, **kwargs)
 
+    async def get(self, *args, **kwargs) -> httpx.Response:
+        return await self._request(self._client.get, *args, **kwargs)
+
     def _wait_callback(self, retry_state: tenacity.RetryCallState) -> int:
         assert retry_state.outcome is not None
         response: httpx.Response = retry_state.outcome.exception().response
diff --git a/clients/python/client/osparc/_solvers_api.py b/clients/python/client/osparc/_solvers_api.py
index d35e9d82..6e5cce61 100644
--- a/clients/python/client/osparc/_solvers_api.py
+++ b/clients/python/client/osparc/_solvers_api.py
@@ -1,12 +1,18 @@
 from typing import Any, List, Optional
 
 import httpx
-from osparc_client import OnePageSolverPort, SolverPort
+from osparc_client import JobInputs, OnePageSolverPort, SolverPort
 from osparc_client import SolversApi as _SolversApi
 
 from . import ApiClient
 from ._models import ParentProjectInfo
-from ._utils import PaginationGenerator, dev_feature, dev_features_enabled
+from ._utils import (
+    _DEFAULT_PAGINATION_LIMIT,
+    _DEFAULT_PAGINATION_OFFSET,
+    PaginationGenerator,
+    dev_feature,
+    dev_features_enabled,
+)
 
 
 class SolversApi(_SolversApi):
@@ -60,18 +66,23 @@ def jobs(self, solver_key: str, version: str) -> PaginationGenerator:
         (its "length")
         """
 
-        def pagination_method():
+        def _pagination_method():
             return super(SolversApi, self).get_jobs_page(
-                solver_key=solver_key, version=version, limit=20, offset=0
+                solver_key=solver_key,
+                version=version,
+                limit=_DEFAULT_PAGINATION_LIMIT,
+                offset=_DEFAULT_PAGINATION_OFFSET,
             )
 
         return PaginationGenerator(
-            first_page_callback=pagination_method,
+            first_page_callback=_pagination_method,
             api_client=self.api_client,
             base_url=self.api_client.configuration.host,
             auth=self._auth,
         )
 
-    def create_job(self, solver_key, version, job_inputs, **kwargs):
+    def create_job(
+        self, solver_key: str, version: str, job_inputs: JobInputs, **kwargs
+    ):
         kwargs = {**kwargs, **ParentProjectInfo().model_dump(exclude_none=True)}
         return super().create_job(solver_key, version, job_inputs, **kwargs)
diff --git a/clients/python/client/osparc/_studies_api.py b/clients/python/client/osparc/_studies_api.py
index 89138fe8..a3a10d81 100644
--- a/clients/python/client/osparc/_studies_api.py
+++ b/clients/python/client/osparc/_studies_api.py
@@ -1,9 +1,24 @@
-from typing import Any
+import asyncio
+import logging
+from pathlib import Path
+from tempfile import mkdtemp
+from typing import Any, Optional
 
+import httpx
+from osparc_client import ApiClient, JobInputs, JobLogsMap, PageStudy
 from osparc_client import StudiesApi as _StudiesApi
+from tqdm.asyncio import tqdm_asyncio
 
+from ._http_client import AsyncHttpClient
 from ._models import ParentProjectInfo
-from ._utils import dev_features_enabled
+from ._utils import (
+    _DEFAULT_PAGINATION_LIMIT,
+    _DEFAULT_PAGINATION_OFFSET,
+    PaginationGenerator,
+    dev_features_enabled,
+)
+
+_logger = logging.getLogger(__name__)
 
 
 class StudiesApi(_StudiesApi):
@@ -24,15 +39,96 @@ class StudiesApi(_StudiesApi):
         "stop_study_job",
     ]
 
+    def __init__(self, api_client: Optional[ApiClient] = None):
+        """Construct object
+
+        Args:
+            api_client (ApiClient, optional): osparc.ApiClient object
+        """
+        self._super: _StudiesApi = super()
+        self._super.__init__(api_client)
+        user: Optional[str] = self.api_client.configuration.username
+        passwd: Optional[str] = self.api_client.configuration.password
+        self._auth: Optional[httpx.BasicAuth] = (
+            httpx.BasicAuth(username=user, password=passwd)
+            if (user is not None and passwd is not None)
+            else None
+        )
+
     def __getattribute__(self, name: str) -> Any:
         if (name in StudiesApi._dev_features) and (not dev_features_enabled()):
             raise NotImplementedError(f"StudiesApi.{name} is still under development")
         return super().__getattribute__(name)
 
-    def create_study_job(self, study_id, job_inputs, **kwargs):
+    def create_study_job(self, study_id: str, job_inputs: JobInputs, **kwargs):
         kwargs = {**kwargs, **ParentProjectInfo().model_dump(exclude_none=True)}
         return super().create_study_job(study_id, job_inputs, **kwargs)
 
-    def clone_study(self, study_id, **kwargs):
+    def clone_study(self, study_id: str, **kwargs):
         kwargs = {**kwargs, **ParentProjectInfo().model_dump(exclude_none=True)}
         return super().clone_study(study_id, **kwargs)
+
+    def studies(self) -> PaginationGenerator:
+        def _pagination_method():
+            page_study = super(StudiesApi, self).list_studies(
+                limit=_DEFAULT_PAGINATION_LIMIT, offset=_DEFAULT_PAGINATION_OFFSET
+            )
+            assert isinstance(page_study, PageStudy)  # nosec
+            return page_study
+
+        return PaginationGenerator(
+            first_page_callback=_pagination_method,
+            api_client=self.api_client,
+            base_url=self.api_client.configuration.host,
+            auth=self._auth,
+        )
+
+    def get_study_job_output_logfiles(self, study_id: str, job_id: str) -> Path:
+        return asyncio.run(
+            self.get_study_job_output_logfiles_async(study_id=study_id, job_id=job_id)
+        )
+
+    async def get_study_job_output_logfiles_async(
+        self, study_id: str, job_id: str, download_dir: Optional[Path] = None
+    ) -> Path:
+        """Download study logs. The log from each node will
+        appear as a file with the node's name in the directory"""
+        if download_dir is not None and not download_dir.is_dir():
+            raise RuntimeError(f"{download_dir=} must be a valid directory")
+        logs_map = super().get_study_job_output_logfile(study_id, job_id)
+        assert isinstance(logs_map, JobLogsMap)  # nosec
+        log_links = logs_map.log_links
+        assert log_links  # nosec
+
+        folder = download_dir or Path(mkdtemp()).resolve()
+        assert folder.is_dir()  # nosec
+        async with AsyncHttpClient(
+            configuration=self.api_client.configuration
+        ) as client:
+
+            async def _download(unique_node_name: str, download_link: str) -> None:
+                response = await client.get(download_link)
+                response.raise_for_status()
+                file = folder / unique_node_name
+                ct = 1
+                while file.exists():
+                    file = file.with_stem(f"{file.stem}({ct})")
+                    ct += 1
+                with file.open("wb") as file_handle:
+                    for chunk in response.iter_bytes():
+                        file_handle.write(chunk)
+
+            tasks = [
+                asyncio.create_task(_download(link.node_name, link.download_link))
+                for link in log_links
+            ]
+            _logger.info(
+                "Downloading log files for study_id=%s and job_id=%s...",
+                study_id,
+                job_id,
+            )
+            await tqdm_asyncio.gather(
+                *tasks, disable=(not _logger.isEnabledFor(logging.INFO))
+            )
+
+        return folder
diff --git a/clients/python/client/osparc/_utils.py b/clients/python/client/osparc/_utils.py
index 98ed2557..77ebc78b 100644
--- a/clients/python/client/osparc/_utils.py
+++ b/clients/python/client/osparc/_utils.py
@@ -24,6 +24,9 @@
 _MB = _KB * 1024  # in bytes
 _GB = _MB * 1024  # in bytes
 
+_DEFAULT_PAGINATION_LIMIT: int = 20
+_DEFAULT_PAGINATION_OFFSET: int = 0
+
 DEFAULT_TIMEOUT_SECONDS: int = 30 * 60
 
 Page = Union[PageJob, PageFile, PageSolver, PageStudy]
diff --git a/clients/python/test/e2e/conftest.py b/clients/python/test/e2e/conftest.py
index e67ebc8d..f7822d93 100644
--- a/clients/python/test/e2e/conftest.py
+++ b/clients/python/test/e2e/conftest.py
@@ -1,12 +1,14 @@
+# pylint: disable=protected-access
 # pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
 
 import logging
 import os
 from pathlib import Path
 from typing import Iterable
+from uuid import UUID
 
 import osparc
 import pytest
@@ -71,3 +73,28 @@ def sleeper(api_client: osparc.ApiClient) -> osparc.Solver:
         "simcore/services/comp/itis/sleeper", "2.0.2"
     )  # type: ignore
     return sleeper
+
+
+@pytest.fixture
+def sleeper_study_id(api_client: osparc.ApiClient) -> UUID:
+    """Simple sleeper study template which takes
+    as input a single file containing a single integer"""
+    _test_study_title = "sleeper_test_study"
+    study_api = osparc.StudiesApi(api_client=api_client)
+    for study in study_api.studies():
+        if study.title == _test_study_title:
+            return UUID(study.uid)
+    pytest.fail(f"Could not find {_test_study_title} study")
+
+
+@pytest.fixture
+def file_with_number(
+    tmp_path: Path, api_client: osparc.ApiClient
+) -> Iterable[osparc.File]:
+    files_api = osparc.FilesApi(api_client=api_client)
+    file = tmp_path / "file_with_number.txt"
+    file.write_text("1")
+    server_file = files_api.upload_file(file)
+    yield server_file
+
+    files_api.delete_file(server_file.id)
diff --git a/clients/python/test/e2e/test_files_api.py b/clients/python/test/e2e/test_files_api.py
index c7519203..57736dd3 100644
--- a/clients/python/test/e2e/test_files_api.py
+++ b/clients/python/test/e2e/test_files_api.py
@@ -1,3 +1,9 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
 import hashlib
 from pathlib import Path
 
diff --git a/clients/python/test/e2e/test_notebooks.py b/clients/python/test/e2e/test_notebooks.py
index 7d9567bb..f1d25d8f 100644
--- a/clients/python/test/e2e/test_notebooks.py
+++ b/clients/python/test/e2e/test_notebooks.py
@@ -1,3 +1,9 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
 import shutil
 import sys
 from pathlib import Path
diff --git a/clients/python/test/e2e/test_solvers_api.py b/clients/python/test/e2e/test_solvers_api.py
index b542d0d0..22c69196 100644
--- a/clients/python/test/e2e/test_solvers_api.py
+++ b/clients/python/test/e2e/test_solvers_api.py
@@ -1,3 +1,9 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
 import json
 
 import osparc
@@ -64,7 +70,7 @@ async def test_logstreaming(
     nloglines: int = 0
     url = f"/v0/solvers/{sleeper.id}/releases/{sleeper.version}/jobs/{job.id}/logstream"
     print(f"starting logstreaming from {url}...")
-
+
     async with async_client.stream(
         "GET",
         url,
diff --git a/clients/python/test/e2e/test_studies.py b/clients/python/test/e2e/test_studies.py
new file mode 100644
index 00000000..a2e80cee
--- /dev/null
+++ b/clients/python/test/e2e/test_studies.py
@@ -0,0 +1,63 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
+import shutil
+from pathlib import Path
+from uuid import UUID
+
+import osparc
+import pytest
+import tenacity
+from _utils import skip_if_osparc_version
+from packaging.version import Version
+
+
+@skip_if_osparc_version(at_least=Version("0.6.6"))
+@pytest.mark.parametrize("download_dir", [True, False])
+async def test_studies_logs(
+    api_client: osparc.ApiClient,
+    file_with_number: osparc.File,
+    sleeper_study_id: UUID,
+    download_dir: bool,
+    tmp_path: Path,
+):
+    studies_api = osparc.StudiesApi(api_client=api_client)
+    job_inputs = osparc.JobInputs(
+        values={
+            "input_file": file_with_number,
+        }
+    )
+    job = studies_api.create_study_job(
+        study_id=f"{sleeper_study_id}", job_inputs=job_inputs
+    )
+    assert isinstance(job, osparc.Job)
+    print(f"Running study job: {job.id}")
+    status = studies_api.start_study_job(study_id=f"{sleeper_study_id}", job_id=job.id)
+    assert isinstance(status, osparc.JobStatus)
+    async for attempt in tenacity.AsyncRetrying(
+        reraise=True,
+        wait=tenacity.wait_fixed(1),
+        stop=tenacity.stop_after_delay(30),
+        retry=tenacity.retry_if_exception_type(AssertionError),
+    ):
+        with attempt:
+            status = studies_api.inspect_study_job(
+                study_id=f"{sleeper_study_id}", job_id=job.id
+            )
+            assert isinstance(status, osparc.JobStatus)
+            assert status.stopped_at is not None
+    assert status.state == "SUCCESS"
+    try:
+        log_dir = await studies_api.get_study_job_output_logfiles_async(
+            study_id=f"{sleeper_study_id}",
+            job_id=job.id,
+            download_dir=tmp_path if download_dir else None,
+        )
+        assert log_dir.is_dir()
+        n_logfiles = sum(1 for _ in log_dir.rglob("*") if _.is_file())
+        assert n_logfiles > 0
+    finally:
+        shutil.rmtree(log_dir)
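
For reference, a minimal usage sketch of the client-side additions in this changeset (the paginated StudiesApi.studies() helper and the study-job log download helper). This is not part of the diff: the host, API key/secret, study UUID and job UUID below are placeholders, and the sketch assumes an osparc client built from this changeset with these methods enabled in your installation.

import osparc

# Placeholder configuration: point this at your own deployment and API key pair.
cfg = osparc.Configuration(
    host="https://api.osparc.io",
    username="<API_KEY>",
    password="<API_SECRET>",
)

with osparc.ApiClient(cfg) as api_client:
    studies_api = osparc.StudiesApi(api_client=api_client)

    # Iterate over study templates page by page via the new studies() generator.
    for study in studies_api.studies():
        print(study.uid, study.title)

    # Download every node's log file of a finished study job into a directory;
    # "<study-uuid>" and "<job-uuid>" are placeholders for real identifiers.
    log_dir = studies_api.get_study_job_output_logfiles(
        study_id="<study-uuid>", job_id="<job-uuid>"
    )
    print(f"log files downloaded to {log_dir}")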