From 1a0be78dead8ba087c067d4ba4f2ffa643b976f6 Mon Sep 17 00:00:00 2001
From: Whisperity
Date: Wed, 28 Feb 2024 16:51:52 +0100
Subject: [PATCH] feat(server): Multiprocess migration and `db_cleanup`

This patch allows the server to run the schema migration and
`db_cleanup` actions in parallel across the configured products, to
speed up these operations. Previously, the migration and cleanup ran as
a sequential job over all products, forfeiting the benefit of having
multiple CPUs on a server. As each product is a separate database and
there must not be any shared resource between products, it is safe to
run each migration in a separate process, in parallel.

Migrations and cleanup are prepared for scheduling in a deterministic
order, `ORDER BY endpoint`. (Previously, it was done by the `ROWID`.)

The connection to the "config" database is released early to prevent a
timeout on the unused and unchanging configuration database from
crashing the server during a longer-running product migration.

Added a facility to create well-formatted logging output in migration
scripts and for the cleanup routines. This log output now necessarily
includes the product's `endpoint` identifier, as the log messages are
no longer sequential.
---
 analyzer/codechecker_analyzer/cmd/analyze.py | 6 +-
 analyzer/codechecker_analyzer/cmd/check.py | 6 +-
 codechecker_common/compatibility/__init__.py | 7 +
 .../multiprocessing.py} | 11 +-
 codechecker_common/util.py | 14 +-
 docs/web/db_schema_guide.md | 53 +--
 web/client/codechecker_client/blame_info.py | 4 +-
 web/client/codechecker_client/cmd/store.py | 23 +-
 .../codechecker_server/api/product_server.py | 8 +-
 .../codechecker_server/api/report_server.py | 8 +-
 web/server/codechecker_server/cmd/server.py | 359 +++++++++++-------
 .../codechecker_server/database/database.py | 105 +++--
 .../codechecker_server/database/db_cleanup.py | 132 ++++---
 .../codechecker_server/migrations/__init__.py | 7 +
 .../migrations/config/__init__.py | 7 +
 .../migrations/config/env.py | 45 +--
 .../migrations/config/script.py.mako | 20 +-
 .../versions/00099e8bc212_store_limit.py | 23 +-
 ...55e4b_added_permission_for_product_view.py | 16 +-
 ...447_share_sessions_through_the_database.py | 28 +-
 .../302693c76eb8_remove_db_version_table.py | 15 +-
 ...35ff7593cc_disable_review_status_change.py | 14 +-
 ...4142b58d2_authentication_session_tokens.py | 24 +-
 ...cf38af_add_extra_product_detail_columns.py | 48 +--
 ...0b20_add_user_name_and_group_to_session.py | 24 +-
 ...lobal_permission_to_get_access_controls.py | 17 +-
 .../versions/8268fc7ca7f4_initial_schema.py | 87 +++--
 .../migrations/config/versions/__init__.py | 7 +
 ...b5278995f41_run_limitation_for_products.py | 16 +-
 .../cf025b6d7998_add_confidentiality.py | 18 +-
 ...ec6feb991e6_new_table_for_server_config.py | 26 +-
 .../codechecker_server/migrations/logging.py | 89 +++++
 .../migrations/report/__init__.py | 7 +
 .../migrations/report/env.py | 99 +----
 .../migrations/report/script.py.mako | 20 +-
 ...e895d7_add_check_command_to_run_history.py | 14 +-
 ...de_add_bug_event_and_point_report_index.py | 23 +-
 ...85167f8568_content_hash_index_for_files.py | 16 +-
 .../3793e361a752_source_components.py | 20 +-
 .../39f9e96071c0_analyzer_statistics.py | 31 +-
 ..._off_and_unavailable_detection_statuses.py | 22 +-
 .../4b38fa14c27b_file_id_index_for_reports.py | 17 +-
 ...43a51e5_add_description_for_run_history.py | 15 +-
 .../versions/6cb6a3a41967_system_comments.py | 15 +-
 ...e226b5d88_review_status_for_each_report.py | 33 +-
 .../versions/82ca43f05c10_initial_schema.py | 293 ++++++++------
 ..._add_codechecker_version_to_run_history.py | 15 +-
 .../9d956a0fae8d_report_annotations.py | 37 +-
 .../migrations/report/versions/__init__.py | 7 +
 ...index_for_report_and_history_id_columns.py | 13 +-
 .../a79677f54e48_remove_db_version_table.py | 15 +-
 .../versions/ad2a567e513a_git_blame_info.py | 15 +-
 ...5d8a21c1e4_add_analyzer_name_for_report.py | 15 +-
 .../c042e02cca99_extended_report_data.py | 33 +-
 ...enabled_and_disabled_checkers_for_a_run.py | 86 ++---
 .../dabc6998b8f0_analysis_info_table.py | 60 +--
 .../dd9c97ead24_share_the_locking_of_runs.py | 17 +-
 .../e89887e7d3f0_add_bug_path_length.py | 22 +-
 ...b1d6be_fix_setting_analysis_info_id_seq.py | 18 +-
 .../versions/fb356f0eefed_cleanup_plan.py | 24 +-
 web/server/codechecker_server/routing.py | 6 +
 web/server/codechecker_server/server.py | 143 +++++--
 .../codechecker_server/session_manager.py | 3 +-
 web/tests/Makefile | 8 +-
 web/tests/libtest/codechecker.py | 18 +-
 65 files changed, 1460 insertions(+), 987 deletions(-)
 create mode 100644 codechecker_common/compatibility/__init__.py
 rename codechecker_common/{multiprocesspool.py => compatibility/multiprocessing.py} (57%)
 create mode 100644 web/server/codechecker_server/migrations/logging.py

diff --git a/analyzer/codechecker_analyzer/cmd/analyze.py b/analyzer/codechecker_analyzer/cmd/analyze.py
index b676fbe9dc..ea1b011161 100644
--- a/analyzer/codechecker_analyzer/cmd/analyze.py
+++ b/analyzer/codechecker_analyzer/cmd/analyze.py
@@ -16,8 +16,6 @@
 import os
 import shutil
 import sys
-
-import multiprocess
 from typing import List

 from tu_collector import tu_collector
@@ -31,6 +29,7 @@
 from codechecker_analyzer.buildlog import log_parser

 from codechecker_common import arg, logger, cmd_config, review_status_handler
+from codechecker_common.compatibility.multiprocessing import cpu_count
 from codechecker_common.skiplist_handler import SkipListHandler, \
     SkipListHandlers
 from codechecker_common.util import load_json
@@ -169,8 +168,7 @@ def add_arguments_to_parser(parser):
                         type=int,
                         dest="jobs",
                         required=False,
-                        # pylint: disable=no-member
-                        default=multiprocess.cpu_count(),
+                        default=cpu_count(),
                         help="Number of threads to use in analysis. More "
                              "threads mean faster analysis at the cost of "
                              "using more memory.")
diff --git a/analyzer/codechecker_analyzer/cmd/check.py b/analyzer/codechecker_analyzer/cmd/check.py
index abbb7add6d..5a73c0485b 100644
--- a/analyzer/codechecker_analyzer/cmd/check.py
+++ b/analyzer/codechecker_analyzer/cmd/check.py
@@ -17,14 +17,13 @@
 import sys
 import tempfile

-import multiprocess
-
 from codechecker_analyzer.analyzers import analyzer_types
 from codechecker_analyzer.arg import \
     OrderedCheckersAction, OrderedConfigAction, \
     analyzer_config, checker_config, existing_abspath

 from codechecker_common import arg, cmd_config, logger
+from codechecker_common.compatibility.multiprocessing import cpu_count
 from codechecker_common.source_code_comment_handler import \
     REVIEW_STATUS_VALUES

@@ -183,8 +182,7 @@ def add_arguments_to_parser(parser):
                         type=int,
                         dest="jobs",
                         required=False,
-                        # pylint: disable=no-member
-                        default=multiprocess.cpu_count(),
+                        default=cpu_count(),
                         help="Number of threads to use in analysis. 
" "More threads mean faster analysis at " "the cost of using more memory.") diff --git a/codechecker_common/compatibility/__init__.py b/codechecker_common/compatibility/__init__.py new file mode 100644 index 0000000000..4259749345 --- /dev/null +++ b/codechecker_common/compatibility/__init__.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- diff --git a/codechecker_common/multiprocesspool.py b/codechecker_common/compatibility/multiprocessing.py similarity index 57% rename from codechecker_common/multiprocesspool.py rename to codechecker_common/compatibility/multiprocessing.py index ad23fa58be..1504804d0c 100644 --- a/codechecker_common/multiprocesspool.py +++ b/codechecker_common/compatibility/multiprocessing.py @@ -6,13 +6,14 @@ # # ------------------------------------------------------------------------- """ -Multiprocess compatibility module. +Multiprocessing compatibility module. """ - import sys -# pylint: disable=unused-import +# pylint: disable=no-name-in-module,unused-import if sys.platform in ["darwin", "win32"]: - from multiprocess import Pool as MultiProcessPool + from multiprocess import Pool # type: ignore + from multiprocess import cpu_count else: - from concurrent.futures import ProcessPoolExecutor as MultiProcessPool + from concurrent.futures import ProcessPoolExecutor as Pool # type: ignore + from multiprocessing import cpu_count diff --git a/codechecker_common/util.py b/codechecker_common/util.py index 197755968f..2d94b306a6 100644 --- a/codechecker_common/util.py +++ b/codechecker_common/util.py @@ -8,12 +8,11 @@ """ Util module. """ - - import itertools import json -from typing import TextIO import os +from typing import TextIO + import portalocker from codechecker_common.logger import get_logger @@ -34,6 +33,15 @@ def arg_match(options, args): return matched_args +def clamp(min_: int, value: int, max_: int) -> int: + """ + Clamps ``value`` to be between ``min_`` and ``max_``, inclusive. + """ + if min_ > max_: + raise ValueError("min <= max required") + return min(max(min_, value), max_) + + def chunks(iterator, n): """ Yield the next chunk if an iterable object. A chunk consists of maximum n diff --git a/docs/web/db_schema_guide.md b/docs/web/db_schema_guide.md index 4451665395..bb35946169 100644 --- a/docs/web/db_schema_guide.md +++ b/docs/web/db_schema_guide.md @@ -17,8 +17,8 @@ https://alembic.sqlalchemy.org/en/latest/autogenerate.html#what-does-autogenerat # Updating configuration database schema -Config database schema scripts can be found under the `config_db_migrate` -directory. +Config database schema scripts can be found under the +`server/codechecker_server/migrations/config/versions` directory. ## Automatic migration script generation (Online) @@ -30,20 +30,21 @@ version. The configuration database schema file can be found here: `server/codechecker_server/database/config_db_model.py` -### **Step 2**: Check the alembic.ini configuration settings +### **Step 2**: Check the `alembic.ini` configuration settings Database connection should point to the correct database. -Edit the sqlalchemy.url option in [alembic.ini]( - https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file) - according to your database configuration. 
+Edit the `sqlalchemy.url` option in +[alembic.ini](https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file) +according to your database configuration. ### **Step 3**: Use alembic to autogenerate migration scripts `alembic --name config_db revision --autogenerate -m "Change description"` ### **Step 4**: Check the generated scripts + The new migration script -`config_db_migrate/versions/{hash}_change_description.py` is generated. +`migrations/config/versions/{hash}_change_description.py` is generated. **You must always check the generated script because sometimes it isn't correct.** @@ -60,29 +61,34 @@ Don't forget to commit the migration script with your other changes. ## Automatic migration script generation (Online) -A Codechecker server should be started and a product should be configured with +A CodeChecker server should be started and a product should be configured with a previous database schema version. +Product (run) database schema scripts can be found under the +`server/codechecker_server/migrations/report/versions` directory. + ### **Step 1**: Update the database model The run database schema file can be found here: `server/codechecker_server/database/run_db_model.py` -### **Step 2**: Check alembic.ini configuration +### **Step 2**: Check `alembic.ini` configuration Database connection should point to the correct database. -Edit the sqlalchemy.url option in [alembic.ini]( - https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file) - according to your database configuration. +Edit the `sqlalchemy.url` option in +[alembic.ini](https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file) +according to your database configuration. #### **Step 2**: Generating migration scripts using autogenerate `alembic --name run_db revision --autogenerate -m "Change description"` #### **Step 3**: Check the generated scripts -The new migration script db_migrate/versions/{hash}_change_description.py is - generated. **You must always check the generated script because sometimes it - isn't correct.** + +The new migration script +`migrations/report/versions/{hash}_change_description.py` is generated. +**You must always check the generated script because sometimes it isn't +correct.** #### **Step 4**: Run all test cases. @@ -118,7 +124,7 @@ and the other is the run database (storing analysis reports). If there is some schema mismatch and migration is needed you will get a warning at server start. -## IMPORTANT before schema upgrade +## IMPORTANT: before schema upgrade If there is some schema change it is recommended to create a full backup of your configuration and run databases before running the migration. @@ -187,17 +193,16 @@ command. $ CodeChecker server --db-upgrade-schema Default [15:01] - Checking configuration database ... [15:01] - Database is OK. -[15:01] - Preparing schema upgrade for Default +[15:01] - Preparing schema upgrade for 'Default' [WARNING] [15:01] - Please note after migration only newer CodeChecker versions can be used to start the server [WARNING] [15:01] - It is advised to make a full backup of your run databases. +[15:01] - Checking: Default +[15:01] - [Default] Database schema mismatch: migration is available. +Do you want to upgrade 'Default' to new schema? Y(es)/n(o) y +[15:01] - [Default] Schema will be upgraded... [15:01] - ======================== -[15:01] - Upgrading: Default -[15:01] - Database schema mismatch: migration is available. -Do you want to upgrade to new schema? Y(es)/n(o) y -Upgrading schema ... -Done. 
-Database is OK. -[15:01] - ======================== +[15:02] - [Default] Upgrading... +[15:03] - [Default] Done upgrading. ``` Schema upgrade can be done for multiple products in a row if the diff --git a/web/client/codechecker_client/blame_info.py b/web/client/codechecker_client/blame_info.py index 034c100abb..0c338fd4cd 100644 --- a/web/client/codechecker_client/blame_info.py +++ b/web/client/codechecker_client/blame_info.py @@ -6,8 +6,8 @@ from git.exc import InvalidGitRepositoryError, GitCommandError from typing import Dict, Iterable, Optional +from codechecker_common.compatibility.multiprocessing import Pool from codechecker_common.logger import get_logger -from codechecker_common.multiprocesspool import MultiProcessPool LOG = get_logger('system') @@ -115,7 +115,7 @@ def assemble_blame_info( Returns the number of collected blame information. """ - with MultiProcessPool() as executor: + with Pool() as executor: file_blame_info = __collect_blame_info_for_files( file_paths, executor.map) diff --git a/web/client/codechecker_client/cmd/store.py b/web/client/codechecker_client/cmd/store.py index 0a45194687..cef69e1583 100644 --- a/web/client/codechecker_client/cmd/store.py +++ b/web/client/codechecker_client/cmd/store.py @@ -42,10 +42,10 @@ from codechecker_client import product from codechecker_common import arg, logger, cmd_config from codechecker_common.checker_labels import CheckerLabels +from codechecker_common.compatibility.multiprocessing import Pool from codechecker_common.source_code_comment_handler import \ SourceCodeCommentHandler from codechecker_common.util import load_json -from codechecker_common.multiprocesspool import MultiProcessPool from codechecker_web.shared import webserver_context, host_check from codechecker_web.shared.env import get_default_workspace @@ -53,7 +53,13 @@ try: from codechecker_client.blame_info import assemble_blame_info except ImportError: - pass + def assemble_blame_info(_, __) -> int: + """ + Shim for cases where Git blame info is not gatherable due to + missing libraries. + """ + raise NotImplementedError() + LOG = logger.get_logger('system') @@ -371,7 +377,7 @@ def filter_source_files_with_comments( """ jobs = file_report_positions.items() - with MultiProcessPool() as executor: + with Pool() as executor: return get_source_file_with_comments(jobs, executor.map) @@ -447,7 +453,7 @@ def assemble_zip(inputs, LOG.debug("Processing report files ...") - with MultiProcessPool() as executor: + with Pool() as executor: analyzer_result_file_reports = parse_analyzer_result_files( analyzer_result_file_paths, checker_labels, executor.map) @@ -562,14 +568,13 @@ def assemble_zip(inputs, zipf, file_paths) if stats.num_of_blame_information: - LOG.info("Collecting blame information done.") + LOG.info("Collecting blame information... Done.") else: LOG.info("No blame information found for source files.") - except NameError: + except NotImplementedError: LOG.warning( - "Collecting blame information has been failed. Make sure " - "'git' is available on your system to hide this warning " - "message.") + "Failed to collect blame information. 
Make sure Git is " + "installed on your system.") zipf.writestr('content_hashes.json', json.dumps(file_to_hash)) diff --git a/web/server/codechecker_server/api/product_server.py b/web/server/codechecker_server/api/product_server.py index 1d14307fdb..e1a7b18ffb 100644 --- a/web/server/codechecker_server/api/product_server.py +++ b/web/server/codechecker_server/api/product_server.py @@ -376,7 +376,8 @@ def addProduct(self, product): msg) conn_str = SQLServer \ - .from_cmdline_args(conn_str_args, IDENTIFIER, None, False, None) \ + .from_cmdline_args(conn_str_args, product.endpoint, IDENTIFIER, + None, False, None) \ .get_connection_string() is_rws_change_disabled = product.isReviewStatusChangeDisabled @@ -534,8 +535,9 @@ def editProduct(self, product_id, new_config): msg) conn_str = SQLServer \ - .from_cmdline_args(conn_str_args, IDENTIFIER, None, - False, None).get_connection_string() + .from_cmdline_args(conn_str_args, product.endpoint, + IDENTIFIER, None, False, None) \ + .get_connection_string() # If endpoint or database arguments change, the product # configuration has changed so severely, that it needs diff --git a/web/server/codechecker_server/api/report_server.py b/web/server/codechecker_server/api/report_server.py index 41f2db1136..095269c3d9 100644 --- a/web/server/codechecker_server/api/report_server.py +++ b/web/server/codechecker_server/api/report_server.py @@ -3400,8 +3400,8 @@ def removeRunReports(self, run_ids, report_filter, cmp_data): # access timestamp to file entries to delay their removal (and avoid # removing frequently accessed files). The same comment applies to # removeRun() function. - db_cleanup.remove_unused_comments(self._Session) - db_cleanup.remove_unused_analysis_info(self._Session) + db_cleanup.remove_unused_comments(self._product) + db_cleanup.remove_unused_analysis_info(self._product) return True @@ -3445,8 +3445,8 @@ def removeRun(self, run_id, run_filter): # error. An alternative solution can be adding a timestamp to file # entries to delay their removal. The same comment applies to # removeRunReports() function. 
- db_cleanup.remove_unused_comments(self._Session) - db_cleanup.remove_unused_analysis_info(self._Session) + db_cleanup.remove_unused_comments(self._product) + db_cleanup.remove_unused_analysis_info(self._product) return bool(runs) diff --git a/web/server/codechecker_server/cmd/server.py b/web/server/codechecker_server/cmd/server.py index e18dd82213..a65c8370bf 100644 --- a/web/server/codechecker_server/cmd/server.py +++ b/web/server/codechecker_server/cmd/server.py @@ -13,17 +13,18 @@ import argparse import errno +from functools import partial import os import signal import socket import sys import time -from typing import List +from typing import List, Optional, Tuple, cast -import psutil from alembic import config from alembic import script from alembic.util import CommandError +import psutil from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import sessionmaker @@ -31,7 +32,8 @@ from codechecker_report_converter import twodim -from codechecker_common import arg, logger, util, cmd_config +from codechecker_common import arg, cmd_config, logger, util +from codechecker_common.compatibility.multiprocessing import Pool, cpu_count from codechecker_server import instance_manager, server from codechecker_server.database import database @@ -503,6 +505,7 @@ def check_product_db_status(cfg_sql_server, migration_root, environ): prod_status = {} for pd in products: db = database.SQLServer.from_connection_string(pd.connection, + pd.endpoint, RUN_META, migration_root, interactive=False, @@ -527,7 +530,7 @@ def check_product_db_status(cfg_sql_server, migration_root, environ): def __db_status_check(cfg_sql_server, migration_root, environ, - product_name=None): + product_name=None) -> int: """ Check and print database statuses for the given product. """ @@ -539,7 +542,7 @@ def __db_status_check(cfg_sql_server, migration_root, environ, prod_statuses = check_product_db_status(cfg_sql_server, migration_root, environ) - if product_name != 'all': + if product_name != "all": avail = prod_statuses.get(product_name) if not avail: LOG.error("No product was found with this endpoint: %s", @@ -559,117 +562,205 @@ def __init__(self, product_name): self.product_name = product_name -def __db_migration(cfg_sql_server, migration_root, environ, - product_to_upgrade: str = 'all', - force_upgrade: bool = False): +def __db_migration(migration_root, + environ, + endpoint: str, + connection_string: str, + init_instead_of_upgrade: bool) -> DBStatus: + try: + db = database.SQLServer.from_connection_string(connection_string, + endpoint, + RUN_META, + migration_root, + interactive=False, + env=environ) + if init_instead_of_upgrade: + LOG.info("[%s] Initialising...", endpoint) + status = db.connect(init=True) + else: + LOG.info("[%s] Upgrading...", endpoint) + db.connect(init=False) + status = db.upgrade() + + status_str = database_status.db_status_msg.get( + status, "Unknown database status") + LOG.info("[%s] Done %s. 
%s", endpoint, + "initialising" if init_instead_of_upgrade else "upgrading", + status_str) + return status + except (CommandError, SQLAlchemyError): + LOG.error("A database error occurred during the init/migration of " + "'%s'", endpoint) + import traceback + traceback.print_exc() + return DBStatus.SCHEMA_INIT_ERROR if init_instead_of_upgrade \ + else DBStatus.SCHEMA_UPGRADE_FAILED + except Exception as e: + LOG.error("A generic error '%s' occurred during the init/migration " + "of '%s'", str(type(e)), endpoint) + import traceback + traceback.print_exc() + return DBStatus.SCHEMA_INIT_ERROR if init_instead_of_upgrade \ + else DBStatus.SCHEMA_UPGRADE_FAILED + + +def __db_migration_multiple( + cfg_sql_server, migration_root, environ, + products_requested_for_upgrade: Optional[List[str]] = None, + force_upgrade: bool = False +) -> int: """ - Handles database management, schema checking and migrations. + Migrates the schema for the product database + ``products_requested_for_upgrade`` if specified, or all configured + products (default). """ - LOG.info("Preparing schema upgrade for %s", str(product_to_upgrade)) - product_name = product_to_upgrade + LOG.info("Preparing schema upgrade for '%s'", + "', '".join(products_requested_for_upgrade) + if products_requested_for_upgrade else "") prod_statuses = check_product_db_status(cfg_sql_server, migration_root, environ) - prod_to_upgrade: List[str] = list() - - if product_name != "all": - avail = prod_statuses.get(product_name) + products_to_upgrade: List[str] = list() + for endpoint in (products_requested_for_upgrade or []): + avail = prod_statuses.get(endpoint) if not avail: - LOG.error("No product was found with this endpoint: %s", - product_name) + LOG.error("No product was found with endpoint '%s'", endpoint) return 1 - prod_to_upgrade.append(product_name) + products_to_upgrade.append(endpoint) else: - prod_to_upgrade = list(prod_statuses.keys()) - prod_to_upgrade.sort() + products_to_upgrade = list(prod_statuses.keys()) + products_to_upgrade.sort() + + def _get_migration_decisions() -> List[Tuple[str, str, bool]]: + # The lifetime of the CONFIG database connection is scoped to this + # helper function, as keeping it alive throughout PRODUCT migrations + # could cause timeouts. + cfg_engine = cfg_sql_server.create_engine() + cfg_session_factory = sessionmaker(bind=cfg_engine) + cfg_session = cfg_session_factory() + + scheduled_upgrades_or_inits: List[Tuple[str, str, bool]] = list() + for endpoint in products_to_upgrade: + LOG.info("Checking: %s", endpoint) + connection_str: Optional[str] = None - LOG.warning("Please note after migration only newer CodeChecker versions " - "can be used to start the server") - LOG.warning("It is advised to make a full backup of your run databases.") - - for prod in prod_to_upgrade: - LOG.info("========================") - LOG.info("Checking: %s", prod) - - endpoint, connection_str = None, None - try: - # Obtain the configuration information for the current product. - engine = cfg_sql_server.create_engine() - config_session = sessionmaker(bind=engine) - sess = config_session() - product = sess.query(ORMProduct).filter( - ORMProduct.endpoint == prod).first() - if product is None: - raise NonExistentProductError(prod) - - endpoint = product.endpoint - connection_str = product.connection - - # Close the connection to the CONFIG database. It is not needed - # anymore, but an intermittent timeout would cause scary - # exceptions if the migration itself is running too long. 
- sess.close() - engine.dispose() - except NonExistentProductError as nepe: - LOG.error("Attempted to upgrade product '%s', but it was not " - "found in the server's configuration database.", - nepe.product_name) - except Exception: - LOG.error("Failed to get the configuration for product '%s'", - prod) - import traceback - traceback.print_exc() - - if not endpoint or not connection_str: - continue + try: + product: Optional[ORMProduct] = cfg_session \ + .query(ORMProduct.connection) \ + .filter(ORMProduct.endpoint == endpoint) \ + .one_or_none() + if product is None: + raise NonExistentProductError(endpoint) + + connection_str = product.connection + except NonExistentProductError as nepe: + LOG.error("Attempted to upgrade product '%s', but it was not " + "found in the server's configuration database.", + nepe.product_name) + continue + except Exception: + LOG.error("Failed to get the configuration for product '%s'", + endpoint) + import traceback + traceback.print_exc() + continue - try: - db = database.SQLServer.from_connection_string(connection_str, - RUN_META, - migration_root, - interactive=False, - env=environ) - db_status = db.connect() + try: + db = database.SQLServer.from_connection_string( + cast(str, connection_str), + endpoint, + RUN_META, + migration_root, + interactive=False, + env=environ) + db_status = db.connect() + + status_str = database_status.db_status_msg.get( + db_status, "Unknown database status") + LOG.info(status_str) + + if db_status == DBStatus.SCHEMA_MISSING: + question = "Do you want to initialize a new schema for " \ + f"'{endpoint}'" \ + "? Y(es)/n(o) " + if force_upgrade or env.get_user_input(question): + LOG.info("[%s] Schema will be initialised...", + endpoint) + scheduled_upgrades_or_inits.append( + (endpoint, cast(str, connection_str), True)) + else: + LOG.info("[%s] No schema initialization will be done.", + endpoint) + elif db_status == DBStatus.SCHEMA_MISMATCH_OK: + question = f"Do you want to upgrade '{endpoint}' to new " \ + "schema? 
Y(es)/n(o) " + if force_upgrade or env.get_user_input(question): + LOG.info("[%s] Schema will be upgraded...", endpoint) + scheduled_upgrades_or_inits.append( + (endpoint, cast(str, connection_str), False)) + else: + LOG.info("[%s] No schema migration will be done.", + endpoint) + except (CommandError, SQLAlchemyError): + LOG.error("A database error occurred during the preparation " + "for the init/migration of '%s'", endpoint) + import traceback + traceback.print_exc() + except Exception as e: + LOG.error("A generic error '%s' occurred during the " + "preparation for the init/migration of '%s'", + str(type(e)), endpoint) + import traceback + traceback.print_exc() + + cfg_session.close() + cfg_engine.dispose() + return scheduled_upgrades_or_inits - status_str = database_status.db_status_msg.get( - db_status, "Unknown database status") - LOG.info(status_str) + LOG.warning("Please note after migration only newer CodeChecker versions " + "can be used to start the server!") + LOG.warning("It is advised to make a full backup of your run databases.") + LOG.info("========================") + scheduled_upgrades_or_inits = _get_migration_decisions() + LOG.info("========================") + + if scheduled_upgrades_or_inits: + failed_products: List[Tuple[str, DBStatus]] = list() + thr_count = util.clamp(1, len(scheduled_upgrades_or_inits), + cpu_count()) + with Pool(max_workers=thr_count) as executor: + LOG.info("Initialising/upgrading products using %d concurrent " + "jobs...", thr_count) + for product_cfg, return_status in \ + zip(scheduled_upgrades_or_inits, executor.map( + # Bind the first 2 non-changing arguments of + # __db_migration, this is fixed for the execution. + partial(__db_migration, migration_root, environ), + # Transform List[Tuple[str, str, bool]] into an + # Iterable[Tuple[str], Tuple[str], Tuple[bool]], + # and immediately unpack it, thus providing the other + # 3 arguments of __db_migration as a parameter pack. + *zip(*scheduled_upgrades_or_inits))): + if return_status != DBStatus.OK: + failed_products.append((product_cfg[0], return_status)) + + if failed_products: + LOG.error("The following products failed to upgrade: %s", + ", ".join(list(map(lambda p: "'%s' (%s)" % + (p[0], + database_status.db_status_msg.get( + p[1], "Unknown database status") + ), + failed_products)))) + else: + LOG.info("Schema initialisation(s)/upgrade(s) executed " + "successfully.") + LOG.info("========================") - if db_status == DBStatus.SCHEMA_MISSING: - question = "Do you want to initialize a new schema for " \ - + endpoint + "? Y(es)/n(o) " - if force_upgrade or env.get_user_input(question): - conn_status = db.connect(init=True) - status_str = database_status.db_status_msg.get( - conn_status, "Unknown database status") - LOG.info(status_str) - else: - LOG.info("No schema initialization was done.") - elif db_status == DBStatus.SCHEMA_MISMATCH_OK: - question = "Do you want to upgrade to new schema for " \ - + endpoint + "? 
Y(es)/n(o) " - if force_upgrade or env.get_user_input(question): - LOG.info("Upgrading schema ...") - new_status = db.upgrade() - LOG.info("Done.") - status_str = database_status.db_status_msg.get( - new_status, "Unknown database status") - LOG.info(status_str) - else: - LOG.info("No schema migration was done.") - except (CommandError, SQLAlchemyError): - LOG.error("A database error occurred during the init/migration " - "of '%s'", prod) - import traceback - traceback.print_exc() - except Exception as e: - LOG.error("A generic error '%s' occurred during the " - "init/migration of '%s'", str(type(e)), prod) - import traceback - traceback.print_exc() - - LOG.info("========================") + # This function always returns 0 if the upgrades were attempted, because + # the server can start with some products that have failed to init/migrate. + # It will just simply disallow the connection to those products. return 0 @@ -864,7 +955,7 @@ def server_init_start(args): context.ld_lib_path_extra) cfg_sql_server = database.SQLServer.from_cmdline_args( - vars(args), CONFIG_META, context.config_migration_root, + vars(args), "config", CONFIG_META, context.config_migration_root, interactive=True, env=environ) LOG.info("Checking configuration database ...") @@ -888,27 +979,25 @@ def server_init_start(args): force_upgrade = True if 'force_upgrade' in args else False if db_status == DBStatus.SCHEMA_MISMATCH_OK: - LOG.debug("Configuration database schema mismatch.") + LOG.debug("Configuration database schema mismatch!") LOG.debug("Schema upgrade is possible.") - LOG.warning("Please note after migration only " - "newer CodeChecker versions can be used " - "to start the server") + LOG.warning("Please note after migration only newer CodeChecker " + "versions can be used to start the server!") LOG.warning("It is advised to make a full backup of your " - "configuration database") - + "configuration database!") LOG.warning(cfg_sql_server.get_db_location()) - question = 'Do you want to upgrade to the new schema?' \ - ' Y(es)/n(o) ' + question = "Do you want to upgrade to the new schema?" \ + " Y(es)/n(o) " if force_upgrade or env.get_user_input(question): - print("Upgrading schema ...") - ret = cfg_sql_server.upgrade() - msg = database_status.db_status_msg.get( - ret, 'Unknown database status') - print(msg) - if ret != DBStatus.OK: + LOG.info("Upgrading schema ...") + new_status = cfg_sql_server.upgrade() + status_str = database_status.db_status_msg.get( + new_status, "Unknown database status") + LOG.info(status_str) + if new_status != DBStatus.OK: LOG.error("Schema migration failed") - sys.exit(ret) + sys.exit(new_status) else: LOG.info("No schema migration was done.") sys.exit(0) @@ -922,17 +1011,23 @@ def server_init_start(args): # statuses can be checked. 
try: if args.status: - ret = __db_status_check(cfg_sql_server, context.migration_root, - environ, args.status) + ret = __db_status_check(cfg_sql_server, + context.migration_root, + environ, + args.status) sys.exit(ret) except AttributeError: LOG.debug('Status was not in the arguments.') try: if args.product_to_upgrade: - ret = __db_migration(cfg_sql_server, context.migration_root, - environ, args.product_to_upgrade, - force_upgrade) + ret = __db_migration_multiple( + cfg_sql_server, + context.migration_root, + environ, + [args.product_to_upgrade] + if args.product_to_upgrade != "all" else None, + force_upgrade) sys.exit(ret) except AttributeError: LOG.debug('Product upgrade was not in the arguments.') @@ -946,12 +1041,11 @@ def server_init_start(args): if create_default_product: # Create a default product and add it to the configuration database. - LOG.debug("Create default product...") LOG.debug("Configuring schema and migration...") prod_server = database.SQLiteDatabase( - default_product_path, RUN_META, + "Default", default_product_path, RUN_META, context.run_migration_root, environ) LOG.debug("Checking 'Default' product database.") @@ -985,8 +1079,11 @@ def server_init_start(args): if upgrade_available: print_prod_status(prod_statuses) LOG.warning("Multiple products can be upgraded, make a backup!") - __db_migration(cfg_sql_server, context.run_migration_root, - environ, 'all', force_upgrade) + __db_migration_multiple(cfg_sql_server, + context.run_migration_root, + environ, + None, + force_upgrade) prod_statuses = check_product_db_status(cfg_sql_server, context.run_migration_root, @@ -1001,10 +1098,10 @@ def server_init_start(args): break if non_ok_db: - print("There are some database issues.") + LOG.error("There are some database issues.") if not force_upgrade: - msg = "Do you want to start the server? Y(es)/n(o) " - if not env.get_user_input(msg): + status_str = "Do you want to start the server? Y(es)/n(o) " + if not env.get_user_input(status_str): sys.exit(1) # Start database viewer. diff --git a/web/server/codechecker_server/database/database.py b/web/server/codechecker_server/database/database.py index 2faa4891fa..d2a7690a90 100644 --- a/web/server/codechecker_server/database/database.py +++ b/web/server/codechecker_server/database/database.py @@ -102,8 +102,10 @@ def __enter__(self): self.db_session = sessionmaker(bind=self.engine)() self.db_connection = self.db_session.connection() except Exception as ex: - LOG.debug("Connection error") - LOG.debug(ex) + LOG.debug("Connection error:") + LOG.debug("%s", str(ex)) + import traceback + traceback.print_exc() self.db_error = DBStatus.FAILED_TO_CONNECT return self @@ -152,14 +154,14 @@ class SQLServer(metaclass=ABCMeta): instance of the new server type if needed """ - def __init__(self, model_meta, migration_root): + def __init__(self, name_in_log: str, model_meta, migration_root): """ Sets self.migration_root. migration_root should be the path to the alembic migration scripts. Also sets the created class' model identifier to the given meta dict. 
""" - + self.name_in_log = name_in_log self.__model_meta = model_meta self.migration_root = migration_root @@ -177,6 +179,7 @@ def _create_schema(self): cfg = config.Config() cfg.set_main_option("script_location", self.migration_root) cfg.attributes["connection"] = db.connection + cfg.attributes["database_name"] = self.name_in_log mcontext = migration.MigrationContext.configure(db.connection) database_schema_revision = mcontext.get_current_revision() @@ -198,6 +201,8 @@ def _create_schema(self): except sqlalchemy.exc.SQLAlchemyError as alch_err: LOG.error(str(alch_err)) + import traceback + traceback.print_exc() return False except Exception as ex: @@ -277,38 +282,38 @@ def check_schema(self): # There is a schema mismatch. return DBStatus.SCHEMA_MISMATCH_OK else: - LOG.debug("Schema in the package and" - " in the database is the same.") + LOG.debug("Schema in the package and in the database " + "is the same.") LOG.debug("No schema modification is needed.") return DBStatus.OK except sqlalchemy.exc.SQLAlchemyError as alch_err: - LOG.debug(str(alch_err)) + LOG.error(str(alch_err)) + import traceback + traceback.print_exc() return DBStatus.FAILED_TO_CONNECT except CommandError as cerr: - LOG.debug("Database schema and CodeChecker is incompatible. " + LOG.error("Database schema and CodeChecker is incompatible. " "Please update CodeChecker.") - LOG.debug(str(cerr)) + LOG.error(str(cerr)) + import traceback + traceback.print_exc() return DBStatus.SCHEMA_MISMATCH_NO def upgrade(self): """ - Upgrade database db schema. + Upgrade database schema. Checks the database schema for schema mismatch. The database server has to be started before this method is called. - This method runs an alembic upgrade to HEAD. - + This method runs an Alembic upgrade to HEAD. """ - # another safety check before we initialize or upgrade the schema + # Another safety check before we initialize or upgrade the schema. ret = self.check_schema() - - migration_ok = [DBStatus.SCHEMA_MISMATCH_OK, - DBStatus.SCHEMA_MISSING] - if ret not in migration_ok: - # schema migration is not possible + if ret not in [DBStatus.SCHEMA_MISMATCH_OK, DBStatus.SCHEMA_MISSING]: + # Schema migration is not possible. return ret try: @@ -316,17 +321,18 @@ def upgrade(self): if db.error: return db.error - LOG.debug("Update/create database schema for %s", + LOG.debug("Create/update database schema for %s", self.__model_meta['identifier']) - LOG.debug("Creating new database session") cfg = config.Config() cfg.set_main_option("script_location", self.migration_root) cfg.attributes["connection"] = db.connection + cfg.attributes["database_name"] = self.name_in_log + command.upgrade(cfg, "head") db.session.commit() - LOG.debug("Upgrading database schema: Done") + LOG.debug("Upgrading database schema: Done.") return DBStatus.OK except sqlalchemy.exc.SQLAlchemyError as alch_err: @@ -338,7 +344,7 @@ def upgrade(self): except CommandError as cerr: import traceback traceback.print_exc() - LOG.debug(str(cerr)) + LOG.error(str(cerr)) return DBStatus.SCHEMA_UPGRADE_FAILED @abstractmethod @@ -353,14 +359,14 @@ def connect(self, init=False): pass @abstractmethod - def get_connection_string(self): + def get_connection_string(self) -> str: """ Returns the connection string for SQLAlchemy. DO NOT LOG THE CONNECTION STRING BECAUSE IT MAY CONTAIN THE PASSWORD FOR THE DATABASE! 
""" - pass + raise NotImplementedError() @abstractmethod def get_db_location(self): @@ -431,25 +437,36 @@ def connection_string_to_args(connection_string): return args @staticmethod - def from_connection_string(connection_string, model_meta, migration_root, - interactive=False, env=None): + def from_connection_string(connection_string: str, + name_in_log: str, + model_meta, + migration_root, + interactive=False, + env=None): """ Normally only this method is called form outside of this module in order to instance the proper server implementation. Parameters: - args: the dict of database arguments + connection_string: A fully formatted database URN identifying the + connection parameters. + name_in_log: A user-facing name of the current database to appear + in log output. model_meta: the meta identifier of the database model to use migration_root: path to the database migration scripts env: a run environment dictionary. """ args = SQLServer.connection_string_to_args(connection_string) - return SQLServer.from_cmdline_args(args, model_meta, migration_root, - interactive, env) + return SQLServer.from_cmdline_args(args, + name_in_log, + model_meta, + migration_root, + interactive, + env) @staticmethod - def from_cmdline_args(args, model_meta, migration_root, + def from_cmdline_args(args, name_in_log: str, model_meta, migration_root, interactive=False, env=None): """ Normally only this method is called form outside of this module in @@ -458,6 +475,8 @@ def from_cmdline_args(args, model_meta, migration_root, Parameters: args: the command line arguments from CodeChecker.py, but as a dictionary (if argparse.Namespace, use vars(args)). + name_in_log: A custom user-friendly identifier for the DB in + logs. model_meta: the meta identifier of the database model to use migration_root: path to the database migration scripts interactive: whether or not the database connection can be @@ -471,7 +490,8 @@ def from_cmdline_args(args, model_meta, migration_root, if args['postgresql']: LOG.debug("Using PostgreSQL:") - return PostgreSQLServer(model_meta, + return PostgreSQLServer(name_in_log, + model_meta, migration_root, args['dbaddress'], args['dbport'], @@ -485,8 +505,11 @@ def from_cmdline_args(args, model_meta, migration_root, LOG.debug("Using SQLite:") data_file = os.path.abspath(args['sqlite']) LOG.debug("Database at %s", data_file) - return SQLiteDatabase(data_file, model_meta, - migration_root, run_env=env) + return SQLiteDatabase(name_in_log, + data_file, + model_meta, + migration_root, + run_env=env) class PostgreSQLServer(SQLServer): @@ -494,9 +517,10 @@ class PostgreSQLServer(SQLServer): Handler for PostgreSQL. """ - def __init__(self, model_meta, migration_root, host, port, user, database, - password=None, interactive=False, run_env=None): - super(PostgreSQLServer, self).__init__(model_meta, migration_root) + def __init__(self, name_in_log, model_meta, migration_root, + host, port, user, database, password=None, + interactive=False, run_env=None): + super().__init__(name_in_log, model_meta, migration_root) self.host = host self.port = port @@ -589,7 +613,7 @@ def connect(self, init=False): return self.check_schema() - def get_connection_string(self): + def get_connection_string(self) -> str: return self._get_connection_string(self.database) def get_db_location(self): @@ -601,8 +625,9 @@ class SQLiteDatabase(SQLServer): Handler for SQLite. 
""" - def __init__(self, data_file, model_meta, migration_root, run_env=None): - super(SQLiteDatabase, self).__init__(model_meta, migration_root) + def __init__(self, name_in_log, data_file, model_meta, migration_root, + run_env=None): + super().__init__(name_in_log, model_meta, migration_root) self.dbpath = data_file self.run_env = run_env @@ -627,7 +652,7 @@ def connect(self, init=False): return self.check_schema() - def get_connection_string(self): + def get_connection_string(self) -> str: return str(URL('sqlite+pysqlite', None, None, None, None, self.dbpath)) def get_db_location(self): diff --git a/web/server/codechecker_server/database/db_cleanup.py b/web/server/codechecker_server/database/db_cleanup.py index 8c6feacdd6..d8095677eb 100644 --- a/web/server/codechecker_server/database/db_cleanup.py +++ b/web/server/codechecker_server/database/db_cleanup.py @@ -32,29 +32,30 @@ SQLITE_LIMIT_COMPOUND_SELECT = 500 -def remove_expired_data(session_maker): - """ Remove information that has timed out from the database. """ - remove_expired_run_locks(session_maker) +def remove_expired_data(product): + """Remove information that has timed out from the database.""" + remove_expired_run_locks(product) -def remove_unused_data(session_maker): - """ Remove dangling data (files, comments, etc.) from the database. """ - remove_unused_files(session_maker) - remove_unused_comments(session_maker) - remove_unused_analysis_info(session_maker) +def remove_unused_data(product): + """Remove dangling data (files, comments, etc.) from the database.""" + remove_unused_files(product) + remove_unused_comments(product) + remove_unused_analysis_info(product) -def update_contextual_data(session_maker, context): +def update_contextual_data(product, context): """ Updates information in the database that comes from potentially changing contextual configuration of the server package. """ - upgrade_severity_levels(session_maker, context.checker_labels) + upgrade_severity_levels(product, context.checker_labels) -def remove_expired_run_locks(session_maker): - with DBSession(session_maker) as session: - LOG.debug("Garbage collection of expired run locks started...") +def remove_expired_run_locks(product): + with DBSession(product.session_factory) as session: + LOG.debug("[%s] Garbage collection of expired run locks started...", + product.endpoint) try: locks_expired_at = datetime.now() - timedelta( seconds=RUN_LOCK_TIMEOUT_IN_DATABASE) @@ -67,13 +68,15 @@ def remove_expired_run_locks(session_maker): session.commit() - LOG.debug("Garbage collection of expired run locks finished.") + LOG.debug("[%s] Garbage collection of expired run locks " + "finished.", product.endpoint) except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.ProgrammingError) as ex: - LOG.error("Failed to remove expired run locks: %s", str(ex)) + LOG.error("[%s] Failed to remove expired run locks: %s", + product.endpoint, str(ex)) -def remove_unused_files(session_maker): +def remove_unused_files(product): # File deletion is a relatively slow operation due to database cascades. # Removing files in big chunks prevents reaching a potential database # statement timeout. This hard-coded value is a safe choice according to @@ -81,9 +84,9 @@ def remove_unused_files(session_maker): # the long terms we are planning to reduce cascade deletes by redesigning # bug_path_events and bug_report_points tables. 
CHUNK_SIZE = 500_000 - - with DBSession(session_maker) as session: - LOG.debug("Garbage collection of dangling files started...") + with DBSession(product.session_factory) as session: + LOG.debug("[%s] Garbage collection of dangling files started...", + product.endpoint) try: bpe_files = session.query(BugPathEvent.file_id) \ .group_by(BugPathEvent.file_id) \ @@ -117,16 +120,18 @@ def remove_unused_files(session_maker): session.commit() - LOG.debug("Garbage collection of dangling files finished.") + LOG.debug("[%s] Garbage collection of dangling files finished.", + product.endpoint) except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.ProgrammingError) as ex: - LOG.error("Failed to remove unused files: %s", str(ex)) + LOG.error("[%s] Failed to remove unused files: %s", + product.endpoint, str(ex)) -def remove_unused_comments(session_maker): - """ Remove dangling comments from the database. """ - with DBSession(session_maker) as session: - LOG.debug("Garbage collection of dangling comments started...") +def remove_unused_comments(product): + with DBSession(product.session_factory) as session: + LOG.debug("[%s] Garbage collection of dangling comments started...", + product.endpoint) try: report_hashes = session.query(Report.bug_id) \ .group_by(Report.bug_id) \ @@ -140,22 +145,23 @@ def remove_unused_comments(session_maker): session.commit() - LOG.debug("Garbage collection of dangling comments finished.") + LOG.debug("[%s] Garbage collection of dangling comments " + "finished.", product.endpoint) except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.ProgrammingError) as ex: - LOG.error("Failed to remove dangling comments: %s", str(ex)) + LOG.error("[%s] Failed to remove dangling comments: %s", + product.endpoint, str(ex)) -def remove_unused_analysis_info(session_maker): - """ Remove unused analysis information from the database. """ +def remove_unused_analysis_info(product): # Analysis info deletion is a relatively slow operation due to database # cascades. Removing files in smaller chunks prevents reaching a potential # database statement timeout. This hard-coded value is a safe choice # according to some measurements. CHUNK_SIZE = 500 - - with DBSession(session_maker) as session: - LOG.debug("Garbage collection of dangling analysis info started...") + with DBSession(product.session_factory) as session: + LOG.debug("[%s] Garbage collection of dangling analysis info " + "started...", product.endpoint) try: to_delete = session.query(AnalysisInfo.id) \ .join( @@ -182,23 +188,27 @@ def remove_unused_analysis_info(session_maker): total_count += count if total_count: - LOG.debug("%d dangling analysis info deleted.", total_count) + LOG.debug("[%s] %d dangling analysis info deleted.", + product.endpoint, total_count) session.commit() - LOG.debug("Garbage collection of dangling analysis info finished.") + LOG.debug("[%s] Garbage collection of dangling analysis info " + "finished.", product.endpoint) except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.ProgrammingError) as ex: - LOG.error("Failed to remove dangling analysis info: %s", str(ex)) + LOG.error("[%s] Failed to remove dangling analysis info: %s", + product.endpoint, str(ex)) -def upgrade_severity_levels(session_maker, checker_labels): +def upgrade_severity_levels(product, checker_labels): """ Updates the potentially changed severities to reflect the data in the current label configuration files. 
""" - with DBSession(session_maker) as session: - LOG.debug("Upgrading severity levels started...") + with DBSession(product.session_factory) as session: + LOG.debug("[%s] Upgrading severity levels started...", + product.endpoint) try: count = 0 for analyzer in sorted(checker_labels.get_analyzers()): @@ -215,10 +225,12 @@ def upgrade_severity_levels(session_maker, checker_labels): old_severity: str = \ Severity._VALUES_TO_NAMES[old_severity_db] except KeyError: - LOG.error("Checker '%s/%s' contains invalid " + LOG.error("[%s] Checker '%s/%s' contains invalid " "severity %d, considering as if " "'UNSPECIFIED' (0)!", - analyzer, checker, old_severity_db) + product.endpoint, + analyzer, checker, + old_severity_db) old_severity_db, old_severity = 0, "UNSPECIFIED" new_severity: str = \ checker_labels.severity(checker, analyzer) @@ -231,10 +243,10 @@ def upgrade_severity_levels(session_maker, checker_labels): # label config for the analyzer. This can mean that # the records are older than a change in the checker # naming scheme (e.g., cppchecker results pre-2021). - LOG.warning("Checker '%s/%s' (database severity: " - "'%s' (%d)) does not have a " - "corresponding entry in the label " - "config file.", + LOG.warning("[%s] Checker '%s/%s' (database severity: " + "'%s' (%d)) does not have a corresponding " + "entry in the label config file.", + product.endpoint, analyzer, checker, old_severity, old_severity_db) @@ -252,26 +264,30 @@ def upgrade_severity_levels(session_maker, checker_labels): } if len(new_sev_attempts) == 0: - LOG.debug("%s/%s: Keeping the old severity " - "intact...", analyzer, checker) + LOG.debug("[%s] %s/%s: Keeping the old severity " + "intact...", + product.endpoint, analyzer, checker) continue if len(new_sev_attempts) >= 2 and \ len(set(new_sev_attempts.values())) >= 2: - LOG.error("%s/%s: Multiple similar checkers " + LOG.error("[%s] %s/%s: Multiple similar checkers " "WITH CONFLICTING SEVERITIES were " "found instead: %s", + product.endpoint, analyzer, checker, str(list(new_sev_attempts.items()))) - LOG.debug("%s/%s: Keeping the old severity " - "intact...", analyzer, checker) + LOG.debug("[%s] %s/%s: Keeping the old severity " + "intact...", + product.endpoint, analyzer, checker) continue if len(set(new_sev_attempts.values())) == 1: attempted_name, new_severity = \ next(iter(sorted(new_sev_attempts.items()))) - LOG.info("%s/%s: Found similar checker '%s/%s' " - "(severity: '%s'), using this for the " - "upgrade.", + LOG.info("[%s] %s/%s: Found similar checker " + "'%s/%s' (severity: '%s'), using this " + "for the upgrade.", + product.endpoint, analyzer, checker, analyzer, attempted_name, new_severity) @@ -281,8 +297,9 @@ def upgrade_severity_levels(session_maker, checker_labels): new_severity_db: int = \ Severity._NAMES_TO_VALUES[new_severity] - LOG.info("Upgrading the severity of checker " + LOG.info("[%s] Upgrading the severity of checker " "'%s/%s' from '%s' (%d) to '%s' (%d).", + product.endpoint, analyzer, checker, old_severity, old_severity_db, new_severity, new_severity_db) @@ -294,11 +311,14 @@ def upgrade_severity_levels(session_maker, checker_labels): session.flush() if count: - LOG.debug("%d checker severities upgraded.", count) + LOG.debug("[%s] %d checker severities upgraded.", + product.endpoint, count) session.commit() - LOG.debug("Upgrading severity levels finished.") + LOG.debug("[%s] Upgrading severity levels finished.", + product.endpoint) except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.ProgrammingError) as ex: - LOG.error("Failed to upgrade 
severity levels: %s", str(ex)) + LOG.error("[%s] Failed to upgrade severity levels: %s", + product.endpoint, str(ex)) diff --git a/web/server/codechecker_server/migrations/__init__.py b/web/server/codechecker_server/migrations/__init__.py index e69de29bb2..4259749345 100644 --- a/web/server/codechecker_server/migrations/__init__.py +++ b/web/server/codechecker_server/migrations/__init__.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- diff --git a/web/server/codechecker_server/migrations/config/__init__.py b/web/server/codechecker_server/migrations/config/__init__.py index e69de29bb2..4259749345 100644 --- a/web/server/codechecker_server/migrations/config/__init__.py +++ b/web/server/codechecker_server/migrations/config/__init__.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- diff --git a/web/server/codechecker_server/migrations/config/env.py b/web/server/codechecker_server/migrations/config/env.py index 4fc4cc9cd8..1493aab3b9 100644 --- a/web/server/codechecker_server/migrations/config/env.py +++ b/web/server/codechecker_server/migrations/config/env.py @@ -5,13 +5,15 @@ # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception # # ------------------------------------------------------------------------- - import os import sys from alembic import context from sqlalchemy import engine_from_config, pool +from codechecker_server.database.config_db_model import Base +from codechecker_server.migrations.logging import set_logger_database_name + # This is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config @@ -26,40 +28,20 @@ sys.path.insert(0, os.path.abspath( os.path.join(os.path.dirname(__file__), "..", ".."))) -from codechecker_server.database.config_db_model import Base - target_metadata = Base.metadata +schema = "config" -# Other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True) - - with context.begin_transaction(): - context.run_migrations() +# Other values from the config, defined by the needs of env.py, can be +# acquired: my_important_option = config.get_main_option("my_important_option") def run_migrations_online(): - """Run migrations in 'online' mode. + """ + Run migrations in 'online' mode. - In this scenario we need to create an Engine - and associate a connection with the context. 
+ In this scenario we need to create an Engine and associate a connection + with the context. """ connectable = config.attributes.get('connection', None) if connectable is None: @@ -73,12 +55,13 @@ def run_migrations_online(): connection=connection, target_metadata=target_metadata ) + set_logger_database_name(schema, + config.attributes.get("database_name")) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() + raise NotImplementedError(f"Offline '{schema}' migration is not possible!") +run_migrations_online() diff --git a/web/server/codechecker_server/migrations/config/script.py.mako b/web/server/codechecker_server/migrations/config/script.py.mako index 43c09401bc..692c3047c9 100644 --- a/web/server/codechecker_server/migrations/config/script.py.mako +++ b/web/server/codechecker_server/migrations/config/script.py.mako @@ -1,24 +1,30 @@ -"""${message} +""" +${message} Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} +Revises: ${down_revision | comma,n} Create Date: ${create_date} - """ -# revision identifiers, used by Alembic. +from logging import getLogger + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + + +# Revision identifiers, used by Alembic. revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} def upgrade(): + LOG = getLogger("migration/config") ${upgrades if upgrades else "pass"} def downgrade(): + LOG = getLogger("migration/config") ${downgrades if downgrades else "pass"} diff --git a/web/server/codechecker_server/migrations/config/versions/00099e8bc212_store_limit.py b/web/server/codechecker_server/migrations/config/versions/00099e8bc212_store_limit.py index 22b459797f..505ae50774 100644 --- a/web/server/codechecker_server/migrations/config/versions/00099e8bc212_store_limit.py +++ b/web/server/codechecker_server/migrations/config/versions/00099e8bc212_store_limit.py @@ -1,28 +1,27 @@ -"""Store limit +""" +Store limit Revision ID: 00099e8bc212 -Revises: 7829789fc19c +Revises: 7829789fc19c Create Date: 2023-03-10 16:45:19.301602 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '00099e8bc212' down_revision = '7829789fc19c' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('products', sa.Column('report_limit', sa.Integer(), server_default='500000', nullable=False)) - # ### end Alembic commands ### + op.add_column('products', + sa.Column('report_limit', sa.Integer(), + server_default='500000', nullable=False)) def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### op.drop_column('products', 'report_limit') - # ### end Alembic commands ### diff --git a/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py b/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py index 1e0a05088c..9f15fb5e96 100644 --- a/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py +++ b/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py @@ -1,21 +1,21 @@ -"""Added permission for Product View +""" +Added permission for PRODUCT_VIEW Revision ID: 7ceea9232211 -Revises: 302693c76eb8 +Revises: 302693c76eb8 Create Date: 2021-05-25 09:43:15.402946 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '7ceea9232211' down_revision = '302693c76eb8' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - - table_name = 'permissions_product' column_name = 'permission' type_name = 'product_perms' diff --git a/web/server/codechecker_server/migrations/config/versions/150800b30447_share_sessions_through_the_database.py b/web/server/codechecker_server/migrations/config/versions/150800b30447_share_sessions_through_the_database.py index 5105da87ce..8d73e81c6d 100644 --- a/web/server/codechecker_server/migrations/config/versions/150800b30447_share_sessions_through_the_database.py +++ b/web/server/codechecker_server/migrations/config/versions/150800b30447_share_sessions_through_the_database.py @@ -1,27 +1,29 @@ -"""Share sessions through the database +""" +Share sessions through the database Revision ID: 150800b30447 -Revises: 8268fc7ca7f4 +Revises: 8268fc7ca7f4 Create Date: 2017-11-23 15:26:45.594141 - """ -# revision identifiers, used by Alembic. + +from alembic import op +import sqlalchemy as sa + +# Revision identifiers, used by Alembic. revision = '150800b30447' down_revision = '8268fc7ca7f4' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('sessions', - sa.Column('auth_string', sa.CHAR(64), nullable=False), - sa.Column('token', sa.CHAR(32), nullable=False), - sa.Column('last_access', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('auth_string', - name=op.f('pk_sessions')) + op.create_table( + 'sessions', + sa.Column('auth_string', sa.CHAR(64), nullable=False), + sa.Column('token', sa.CHAR(32), nullable=False), + sa.Column('last_access', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('auth_string', + name=op.f('pk_sessions')) ) diff --git a/web/server/codechecker_server/migrations/config/versions/302693c76eb8_remove_db_version_table.py b/web/server/codechecker_server/migrations/config/versions/302693c76eb8_remove_db_version_table.py index ab934a8267..76745a5e84 100644 --- a/web/server/codechecker_server/migrations/config/versions/302693c76eb8_remove_db_version_table.py +++ b/web/server/codechecker_server/migrations/config/versions/302693c76eb8_remove_db_version_table.py @@ -1,20 +1,21 @@ -"""Remove db version table +""" +Remove db_version table Revision ID: 302693c76eb8 -Revises: dec6feb991e6 +Revises: dec6feb991e6 Create Date: 2020-03-13 12:21:07.198231 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
revision = '302693c76eb8' down_revision = 'dec6feb991e6' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.drop_table('db_version') diff --git a/web/server/codechecker_server/migrations/config/versions/3335ff7593cc_disable_review_status_change.py b/web/server/codechecker_server/migrations/config/versions/3335ff7593cc_disable_review_status_change.py index 665e2b5b75..5b95cfb5c0 100644 --- a/web/server/codechecker_server/migrations/config/versions/3335ff7593cc_disable_review_status_change.py +++ b/web/server/codechecker_server/migrations/config/versions/3335ff7593cc_disable_review_status_change.py @@ -1,20 +1,20 @@ -"""Disable review status change +""" +Disable review_status change Revision ID: 3335ff7593cc -Revises: 4964142b58d2 +Revises: 4964142b58d2 Create Date: 2018-11-29 14:16:58.170551 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + +# Revision identifiers, used by Alembic. revision = '3335ff7593cc' down_revision = '4964142b58d2' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): ctx = op.get_context() diff --git a/web/server/codechecker_server/migrations/config/versions/4964142b58d2_authentication_session_tokens.py b/web/server/codechecker_server/migrations/config/versions/4964142b58d2_authentication_session_tokens.py index ea2469825a..ec90df147a 100644 --- a/web/server/codechecker_server/migrations/config/versions/4964142b58d2_authentication_session_tokens.py +++ b/web/server/codechecker_server/migrations/config/versions/4964142b58d2_authentication_session_tokens.py @@ -1,22 +1,25 @@ -"""Authentication session tokens +""" +Authentication session tokens Revision ID: 4964142b58d2 -Revises: 6b9f832d0b20 +Revises: 6b9f832d0b20 Create Date: 2018-03-28 10:21:38.593302 - """ -# revision identifiers, used by Alembic. + +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
revision = '4964142b58d2' down_revision = '6b9f832d0b20' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('auth_sessions', + op.create_table( + 'auth_sessions', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_name', sa.String(), nullable=True), sa.Column('token', sa.CHAR(length=32), nullable=False), @@ -29,14 +32,17 @@ def upgrade(): nullable=True), sa.PrimaryKeyConstraint('id', name=op.f('pk_auth_sessions')), sa.UniqueConstraint('token', name=op.f('uq_auth_sessions_token'))) + op.drop_table('user_sessions') def downgrade(): - op.create_table('user_sessions', + op.create_table( + 'user_sessions', sa.Column('user_name', sa.VARCHAR(), nullable=False), sa.Column('token', sa.CHAR(length=32), nullable=False), sa.Column('groups', sa.VARCHAR(), nullable=True), sa.Column('last_access', sa.DATETIME(), nullable=False), sa.PrimaryKeyConstraint('user_name', name='pk_user_sessions')) + op.drop_table('auth_sessions') diff --git a/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py b/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py index 0226b3954a..caf4813059 100644 --- a/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py +++ b/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py @@ -1,17 +1,10 @@ -"""Add columns for number of runs and latest run storage +""" +Add columns for number of runs and latest run storage Revision ID: 4db450cf38af -Revises: 7ceea9232211 +Revises: 7ceea9232211 Create Date: 2021-06-28 15:52:57.237509 - """ - -# revision identifiers, used by Alembic. -revision = '4db450cf38af' -down_revision = '7ceea9232211' -branch_labels = None -depends_on = None - from logging import getLogger from alembic import op @@ -22,39 +15,40 @@ from codechecker_web.shared import webserver_context +# Revision identifiers, used by Alembic. 
+revision = '4db450cf38af' +down_revision = '7ceea9232211' +branch_labels = None +depends_on = None + def upgrade(): - LOG = getLogger("migration") + LOG = getLogger("migration/config") + op.add_column( 'products', - sa.Column( - 'latest_storage_date', - sa.DateTime(), - nullable=True)) + sa.Column('latest_storage_date', sa.DateTime(), nullable=True)) op.add_column( 'products', - sa.Column( - 'num_of_runs', - sa.Integer(), - server_default="0", - nullable=True)) + sa.Column('num_of_runs', sa.Integer(), + server_default="0", nullable=True)) try: product_con = op.get_bind() products = product_con.execute( - "SELECT id, connection from products").fetchall() + "SELECT id, endpoint, connection FROM products").fetchall() context = webserver_context.get_context() - for id, connection in products: + for id_, endpoint, connection in products: sql_server = database.SQLServer.from_connection_string( - connection, RUN_META, context.run_migration_root) + connection, endpoint, RUN_META, context.run_migration_root) engine = sa.create_engine(sql_server.get_connection_string()) conn = engine.connect() - run_info = \ - conn.execute("SELECT count(*), max(date) from runs").fetchone() + run_info = conn.execute("SELECT COUNT(*), MAX(date) FROM runs") \ + .fetchone() values = [f"num_of_runs={run_info[0]}"] if run_info[1]: @@ -63,12 +57,12 @@ def upgrade(): product_con.execute(f""" UPDATE products SET {', '.join(values)} - WHERE id={id} + WHERE id={id_} """) except Exception as ex: LOG.error("Failed to fill product detail columns (num_of_runs, " "latest_storage_date): %s", ex) - pass + def downgrade(): op.drop_column('products', 'num_of_runs') diff --git a/web/server/codechecker_server/migrations/config/versions/6b9f832d0b20_add_user_name_and_group_to_session.py b/web/server/codechecker_server/migrations/config/versions/6b9f832d0b20_add_user_name_and_group_to_session.py index bafaab5b7e..c67a98e544 100644 --- a/web/server/codechecker_server/migrations/config/versions/6b9f832d0b20_add_user_name_and_group_to_session.py +++ b/web/server/codechecker_server/migrations/config/versions/6b9f832d0b20_add_user_name_and_group_to_session.py @@ -1,22 +1,25 @@ -"""Add user name and group to session +""" +Add user name and group to session Revision ID: 6b9f832d0b20 -Revises: bb5278995f41 +Revises: bb5278995f41 Create Date: 2018-03-13 10:44:38.446589 - """ -# revision identifiers, used by Alembic. + +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
revision = '6b9f832d0b20' down_revision = 'bb5278995f41' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('user_sessions', + op.create_table( + 'user_sessions', sa.Column('user_name', sa.String(), nullable=False), sa.Column('token', sa.CHAR(length=32), nullable=False), sa.Column('groups', sa.String(), nullable=True), @@ -24,14 +27,17 @@ def upgrade(): sa.PrimaryKeyConstraint('user_name', name=op.f('pk_user_sessions'))) op.create_index(op.f('ix_user_sessions_token'), 'user_sessions', ['token'], unique=False) + op.drop_table('sessions') def downgrade(): - op.create_table('sessions', + op.create_table( + 'sessions', sa.Column('auth_string', sa.CHAR(length=64), nullable=False), sa.Column('token', sa.CHAR(length=32), nullable=False), sa.Column('last_access', sa.DATETIME(), nullable=False), sa.PrimaryKeyConstraint('auth_string', name='pk_sessions')) + op.drop_index(op.f('ix_user_sessions_token'), table_name='user_sessions') op.drop_table('user_sessions') diff --git a/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py b/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py index 9388b08251..28895abc15 100644 --- a/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py +++ b/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py @@ -1,21 +1,22 @@ -"""Global permission to get access controls +""" +Global permission to get access controls Revision ID: 7829789fc19c -Revises: cf025b6d7998 +Revises: cf025b6d7998 Create Date: 2021-10-28 11:29:08.775219 - """ -# revision identifiers, used by Alembic. -revision = '7829789fc19c' -down_revision = 'cf025b6d7998' -branch_labels = None -depends_on = None from alembic import op import sqlalchemy as sa +# Revision identifiers, used by Alembic. +revision = '7829789fc19c' +down_revision = 'cf025b6d7998' +branch_labels = None +depends_on = None + table_name = 'permissions_system' column_name = 'permission' type_name = 'sys_perms' diff --git a/web/server/codechecker_server/migrations/config/versions/8268fc7ca7f4_initial_schema.py b/web/server/codechecker_server/migrations/config/versions/8268fc7ca7f4_initial_schema.py index 808618dad5..d3db663c27 100644 --- a/web/server/codechecker_server/migrations/config/versions/8268fc7ca7f4_initial_schema.py +++ b/web/server/codechecker_server/migrations/config/versions/8268fc7ca7f4_initial_schema.py @@ -1,59 +1,72 @@ -"""Initial schema +""" +Initial schema Revision ID: 8268fc7ca7f4 -Revises: +Revises: Create Date: 2017-09-18 20:57:11.098460 - """ -# revision identifiers, used by Alembic. + +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '8268fc7ca7f4' down_revision = None branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('db_version', - sa.Column('major', sa.Integer(), nullable=False), - sa.Column('minor', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('major', 'minor', name=op.f('pk_db_version')) + op.create_table( + 'db_version', + sa.Column('major', sa.Integer(), nullable=False), + sa.Column('minor', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('major', 'minor', name=op.f('pk_db_version')) ) - op.create_table('permissions_system', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('permission', sa.Enum('SUPERUSER', name='sys_perms'), nullable=True), - sa.Column('name', sa.String(), nullable=False), - sa.Column('is_group', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_permissions_system')) + + op.create_table( + 'permissions_system', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('permission', sa.Enum('SUPERUSER', name='sys_perms'), + nullable=True), + sa.Column('name', sa.String(), nullable=False), + sa.Column('is_group', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_permissions_system')) ) - op.create_table('products', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('endpoint', sa.String(), nullable=False), - sa.Column('connection', sa.String(), nullable=False), - sa.Column('display_name', sa.String(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_products')), - sa.UniqueConstraint('endpoint', name=op.f('uq_products_endpoint')) + + op.create_table( + 'products', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('endpoint', sa.String(), nullable=False), + sa.Column('connection', sa.String(), nullable=False), + sa.Column('display_name', sa.String(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_products')), + sa.UniqueConstraint('endpoint', name=op.f('uq_products_endpoint')) ) - op.create_table('permissions_product', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('permission', sa.Enum('PRODUCT_ADMIN', 'PRODUCT_ACCESS', 'PRODUCT_STORE', name='product_perms'), nullable=True), - sa.Column('product_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('is_group', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['product_id'], ['products.id'], name=op.f('fk_permissions_product_product_id_products'), ondelete='CASCADE', initially="IMMEDIATE", deferrable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_permissions_product')) + + op.create_table( + 'permissions_product', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('permission', sa.Enum('PRODUCT_ADMIN', + 'PRODUCT_ACCESS', + 'PRODUCT_STORE', + name='product_perms'), + nullable=True), + sa.Column('product_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('is_group', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ['product_id'], ['products.id'], + name=op.f('fk_permissions_product_product_id_products'), + ondelete='CASCADE', initially="IMMEDIATE", deferrable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_permissions_product')) ) - # ### end Alembic commands ### def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### op.drop_table('permissions_product') op.drop_table('products') op.drop_table('permissions_system') op.drop_table('db_version') - # ### end Alembic commands ### diff --git a/web/server/codechecker_server/migrations/config/versions/__init__.py b/web/server/codechecker_server/migrations/config/versions/__init__.py index e69de29bb2..4259749345 100644 --- a/web/server/codechecker_server/migrations/config/versions/__init__.py +++ b/web/server/codechecker_server/migrations/config/versions/__init__.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- diff --git a/web/server/codechecker_server/migrations/config/versions/bb5278995f41_run_limitation_for_products.py b/web/server/codechecker_server/migrations/config/versions/bb5278995f41_run_limitation_for_products.py index 03039e648a..0f16c1968f 100644 --- a/web/server/codechecker_server/migrations/config/versions/bb5278995f41_run_limitation_for_products.py +++ b/web/server/codechecker_server/migrations/config/versions/bb5278995f41_run_limitation_for_products.py @@ -1,19 +1,21 @@ -"""Run limitation for products +""" +Run limitation for products Revision ID: bb5278995f41 -Revises: 150800b30447 +Revises: 150800b30447 Create Date: 2018-03-01 15:38:41.164141 - """ -# revision identifiers, used by Alembic. + +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = 'bb5278995f41' down_revision = '150800b30447' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('products', diff --git a/web/server/codechecker_server/migrations/config/versions/cf025b6d7998_add_confidentiality.py b/web/server/codechecker_server/migrations/config/versions/cf025b6d7998_add_confidentiality.py index 3471510205..d247286458 100644 --- a/web/server/codechecker_server/migrations/config/versions/cf025b6d7998_add_confidentiality.py +++ b/web/server/codechecker_server/migrations/config/versions/cf025b6d7998_add_confidentiality.py @@ -1,23 +1,25 @@ -"""Add confidentiality +""" +Add confidentiality Revision ID: cf025b6d7998 -Revises: 4db450cf38af +Revises: 4db450cf38af Create Date: 2021-09-08 13:07:08.891285 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
revision = 'cf025b6d7998' down_revision = '4db450cf38af' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.add_column('products', sa.Column('confidentiality', sa.String(), nullable=True)) + op.add_column('products', + sa.Column('confidentiality', sa.String(), nullable=True)) def downgrade(): diff --git a/web/server/codechecker_server/migrations/config/versions/dec6feb991e6_new_table_for_server_config.py b/web/server/codechecker_server/migrations/config/versions/dec6feb991e6_new_table_for_server_config.py index 62e6a5f5a4..04cddc76ab 100644 --- a/web/server/codechecker_server/migrations/config/versions/dec6feb991e6_new_table_for_server_config.py +++ b/web/server/codechecker_server/migrations/config/versions/dec6feb991e6_new_table_for_server_config.py @@ -1,27 +1,29 @@ -"""New table for server config +""" +New table for server config Revision ID: dec6feb991e6 -Revises: 3335ff7593cc +Revises: 3335ff7593cc Create Date: 2019-02-05 17:36:46.527079 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = 'dec6feb991e6' down_revision = '3335ff7593cc' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('server_configurations', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('config_key', sa.String(), nullable=False), - sa.Column('config_value', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_server_configurations')) + op.create_table( + 'server_configurations', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('config_key', sa.String(), nullable=False), + sa.Column('config_value', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_server_configurations')) ) diff --git a/web/server/codechecker_server/migrations/logging.py b/web/server/codechecker_server/migrations/logging.py new file mode 100644 index 0000000000..12b8ffcba4 --- /dev/null +++ b/web/server/codechecker_server/migrations/logging.py @@ -0,0 +1,89 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- +import logging +import sys +from typing import Optional, cast + + +class MigrationFormatter(logging.Formatter): + """ + Truncates the filename to show only the revision that is being migrated + in the log output. + """ + def __init__(self, schema: str): + super().__init__(fmt="[%(levelname)s][%(asctime)s] " + "{migration/%(schema)s} " + "[%(database)s] " + "- %(revision)s:%(lineno)d " + "- %(message)s", + datefmt="%Y-%m-%d %H:%M:%S") + self.schema = schema + self._database: Optional[str] = None + + @property + def database(self): + return self._database or "" + + @database.setter + def database(self, db_name: str): + self._database = db_name + + def format(self, record): + record.database = self.database + record.schema = self.schema + record.revision = record.filename[:record.filename.find("_")] + return super().format(record) + + +def setup_logger(schema: str): + """ + Set up a logging system that should be used during schema migration. 
+    These outputs are not affected by the environment that executes a
+    migration, e.g., by the running CodeChecker server!
+
+    In migration scripts, use the built-in logging facilities instead of
+    CodeChecker's wrapper, and ensure that the name of the logger created
+    exactly matches ``migration/<schema>``!
+    """
+    sys_logger = logging.getLogger("system")
+    codechecker_loglvl = sys_logger.getEffectiveLevel()
+    if codechecker_loglvl >= logging.INFO:
+        # This might be 30 (WARNING) if the migration is run outside of
+        # CodeChecker's context, e.g., in a downgrade.
+        codechecker_loglvl = logging.INFO
+
+    # Use the default logging class that came with Python for the migration,
+    # temporarily turning away from potentially existing global changes.
+    existing_logger_cls = logging.getLoggerClass()
+    logging.setLoggerClass(logging.Logger)
+    logger = logging.getLogger(f"migration/{schema}")
+    logging.setLoggerClass(existing_logger_cls)
+
+    if not logger.hasHandlers():
+        fmt = MigrationFormatter(schema)
+        handler = logging.StreamHandler()
+        handler.setFormatter(fmt)
+        handler.setLevel(codechecker_loglvl)
+        handler.setStream(sys.stdout)
+
+        logger.setLevel(codechecker_loglvl)
+        logger.addHandler(handler)
+    else:
+        handler = logger.handlers[0]
+        fmt = handler.formatter
+
+    return logger, handler, cast(MigrationFormatter, fmt)
+
+
+def set_logger_database_name(schema: str, database: str):
+    """
+    Sets the logger's output for the current migration ``schema`` to indicate
+    that the actions are performed on ``database``.
+    """
+    _, _, fmt = setup_logger(schema)
+    fmt.database = database
diff --git a/web/server/codechecker_server/migrations/report/__init__.py b/web/server/codechecker_server/migrations/report/__init__.py
index e69de29bb2..4259749345 100644
--- a/web/server/codechecker_server/migrations/report/__init__.py
+++ b/web/server/codechecker_server/migrations/report/__init__.py
@@ -0,0 +1,7 @@
+# -------------------------------------------------------------------------
+#
+# Part of the CodeChecker project, under the Apache License v2.0 with
+# LLVM Exceptions. See LICENSE for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+#
+# -------------------------------------------------------------------------
diff --git a/web/server/codechecker_server/migrations/report/env.py b/web/server/codechecker_server/migrations/report/env.py
index 0bdd89f0f8..9b5fcc58fd 100644
--- a/web/server/codechecker_server/migrations/report/env.py
+++ b/web/server/codechecker_server/migrations/report/env.py
@@ -5,105 +5,37 @@
 # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 #
 # -------------------------------------------------------------------------
-import logging
 import os
 import sys
 
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 
-# This is the Alembic Config object, which provides access to the values
-# within the .ini file in use.
+from codechecker_server.database.run_db_model import Base
+from codechecker_server.migrations.logging import set_logger_database_name
+
+# This is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
 config = context.config
 
# Add model's MetaData object here for 'autogenerate' support.
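
As a usage sketch grounded in the script.py.mako templates in this patch: a
revision script simply asks for the pre-configured logger by name, and the
handler installed by setup_logger() prefixes every message with the schema,
the database, and the revision being run:

    from logging import getLogger

    def upgrade():
        # "migration/config" for the configuration schema,
        # "migration/report" for a product's report schema.
        LOG = getLogger("migration/report")
        LOG.info("Migrating the reports table...")
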
sys.path.insert(0, os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "..", ".."))) -from codechecker_server.database.run_db_model import Base - target_metadata = Base.metadata +schema = "report" + # Other values from the config, defined by the needs of env.py, can be # acquired: my_important_option = config.get_main_option("my_important_option") -class MigrationFormatter(logging.Formatter): - """ - Truncates the filename to show only the revision that is being migrated - in the log output. - """ - def __init__(self): - super().__init__(fmt="[%(levelname)s][%(asctime)s] " - "{migration/report} " - "[%(schemaVersion)s]:%(lineno)d " - "- %(message)s", - datefmt="%Y-%m-%d %H:%M:%S") - - def format(self, record): - record.schemaVersion = record.filename[:record.filename.find("_")] - return super().format(record) - - -def setup_logger(): - """ - Set up a logging system that should be used during schema migration. - These outputs are not affected by the environment that executes a migration, - e.g., by the running CodeChecker server! - - In migration scripts, use the built-in logging facilities instead of - CodeChecker's wrapper, and ensure that the name of the logger created - exactly matches "migration"! - """ - sys_logger = logging.getLogger("system") - codechecker_loglvl = sys_logger.getEffectiveLevel() - if codechecker_loglvl >= logging.INFO: - # This might be 30 (WARNING) if the migration is run outside of - # CodeChecker's context, e.g., in a downgrade. - codechecker_loglvl = logging.INFO - - # Use the default logging class that came with Python for the migration, - # temporarily turning away from potentially existing global changes. - existing_logger_cls = logging.getLoggerClass() - logging.setLoggerClass(logging.Logger) - logger = logging.getLogger("migration") - logging.setLoggerClass(existing_logger_cls) - - if not logger.hasHandlers(): - fmt = MigrationFormatter() - handler = logging.StreamHandler() - handler.setFormatter(fmt) - handler.setLevel(codechecker_loglvl) - handler.setStream(sys.stdout) - - logger.setLevel(codechecker_loglvl) - logger.addHandler(handler) - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True) - - with context.begin_transaction(): - context.run_migrations() - - def run_migrations_online(): - """Run migrations in 'online' mode. + """ + Run migrations in 'online' mode. - In this scenario we need to create an Engine - and associate a connection with the context. + In this scenario we need to create an Engine and associate a connection + with the context. 
""" connectable = config.attributes.get('connection', None) if connectable is None: @@ -117,12 +49,13 @@ def run_migrations_online(): connection=connection, target_metadata=target_metadata ) + set_logger_database_name(schema, + config.attributes.get("database_name")) with context.begin_transaction(): context.run_migrations() -setup_logger() + if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() + raise NotImplementedError(f"Offline '{schema}' migration is not possible!") +run_migrations_online() diff --git a/web/server/codechecker_server/migrations/report/script.py.mako b/web/server/codechecker_server/migrations/report/script.py.mako index 43c09401bc..6654e52ee2 100644 --- a/web/server/codechecker_server/migrations/report/script.py.mako +++ b/web/server/codechecker_server/migrations/report/script.py.mako @@ -1,24 +1,30 @@ -"""${message} +""" +${message} Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} +Revises: ${down_revision | comma,n} Create Date: ${create_date} - """ -# revision identifiers, used by Alembic. +from logging import getLogger + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + + +# Revision identifiers, used by Alembic. revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} def upgrade(): + LOG = getLogger("migration/report") ${upgrades if upgrades else "pass"} def downgrade(): + LOG = getLogger("migration/report") ${downgrades if downgrades else "pass"} diff --git a/web/server/codechecker_server/migrations/report/versions/080349e895d7_add_check_command_to_run_history.py b/web/server/codechecker_server/migrations/report/versions/080349e895d7_add_check_command_to_run_history.py index 6436c9dffd..c6b6080c7c 100644 --- a/web/server/codechecker_server/migrations/report/versions/080349e895d7_add_check_command_to_run_history.py +++ b/web/server/codechecker_server/migrations/report/versions/080349e895d7_add_check_command_to_run_history.py @@ -1,23 +1,21 @@ -"""Add check command to run history +""" +Add check command to run history Revision ID: 080349e895d7 -Revises: 101a9cb747de +Revises: 101a9cb747de Create Date: 2018-03-12 15:10:24.652576 - """ +from alembic import op +import sqlalchemy as sa - -# revision identifiers, used by Alembic. +# Revision identifiers, used by Alembic. revision = '080349e895d7' down_revision = '101a9cb747de' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('run_histories', diff --git a/web/server/codechecker_server/migrations/report/versions/101a9cb747de_add_bug_event_and_point_report_index.py b/web/server/codechecker_server/migrations/report/versions/101a9cb747de_add_bug_event_and_point_report_index.py index 6901394405..6e8a913740 100644 --- a/web/server/codechecker_server/migrations/report/versions/101a9cb747de_add_bug_event_and_point_report_index.py +++ b/web/server/codechecker_server/migrations/report/versions/101a9cb747de_add_bug_event_and_point_report_index.py @@ -1,27 +1,30 @@ -"""add bug event and point report index +""" +Add bug event and point report index Revision ID: 101a9cb747de -Revises: dd9c97ead24 +Revises: dd9c97ead24 Create Date: 2018-02-15 15:30:59.966552 - """ +from alembic import op - -# revision identifiers, used by Alembic. +# Revision identifiers, used by Alembic. 
revision = '101a9cb747de' down_revision = 'dd9c97ead24' branch_labels = None depends_on = None -from alembic import op def upgrade(): - op.create_index(op.f('ix_bug_path_events_report_id'), 'bug_path_events', ['report_id'], unique=False) - op.create_index(op.f('ix_bug_report_points_report_id'), 'bug_report_points', ['report_id'], unique=False) + op.create_index(op.f('ix_bug_path_events_report_id'), 'bug_path_events', + ['report_id'], unique=False) + op.create_index(op.f('ix_bug_report_points_report_id'), + 'bug_report_points', ['report_id'], unique=False) def downgrade(): - op.drop_index(op.f('ix_bug_path_events_report_id'), table_name='bug_path_events') - op.drop_index(op.f('ix_bug_report_points_report_id'), table_name='bug_report_points') + op.drop_index(op.f('ix_bug_path_events_report_id'), + table_name='bug_path_events') + op.drop_index(op.f('ix_bug_report_points_report_id'), + table_name='bug_report_points') diff --git a/web/server/codechecker_server/migrations/report/versions/2185167f8568_content_hash_index_for_files.py b/web/server/codechecker_server/migrations/report/versions/2185167f8568_content_hash_index_for_files.py index bee6a26eed..a1b5895908 100644 --- a/web/server/codechecker_server/migrations/report/versions/2185167f8568_content_hash_index_for_files.py +++ b/web/server/codechecker_server/migrations/report/versions/2185167f8568_content_hash_index_for_files.py @@ -1,22 +1,24 @@ -"""content hash index for files +""" +Content hash index for files Revision ID: 2185167f8568 -Revises: 5f8a443a51e5 +Revises: 5f8a443a51e5 Create Date: 2020-07-28 16:02:01.131126 - """ -# revision identifiers, used by Alembic. +from alembic import op + +# Revision identifiers, used by Alembic. revision = '2185167f8568' down_revision = '5f8a443a51e5' branch_labels = None depends_on = None -from alembic import op - def upgrade(): - op.create_index(op.f('ix_files_content_hash'), 'files', ['content_hash'], unique=False) + op.create_index(op.f('ix_files_content_hash'), 'files', ['content_hash'], + unique=False) + def downgrade(): op.drop_index(op.f('ix_files_content_hash'), table_name='files') diff --git a/web/server/codechecker_server/migrations/report/versions/3793e361a752_source_components.py b/web/server/codechecker_server/migrations/report/versions/3793e361a752_source_components.py index c2eac0cc02..39273ec486 100644 --- a/web/server/codechecker_server/migrations/report/versions/3793e361a752_source_components.py +++ b/web/server/codechecker_server/migrations/report/versions/3793e361a752_source_components.py @@ -1,31 +1,31 @@ -"""Source components +""" +Source components Revision ID: 3793e361a752 -Revises: 080349e895d7 +Revises: 080349e895d7 Create Date: 2018-04-20 09:29:24.072720 - """ +from alembic import op +import sqlalchemy as sa - -# revision identifiers, used by Alembic. +# Revision identifiers, used by Alembic. 
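
A note on the op.f() wrapper that recurs in these scripts: it marks a
constraint or index name as already final, so Alembic applies no further
naming convention to it. For instance:

    from alembic import op

    def upgrade():
        # The index name is taken literally; without op.f() it could be
        # re-decorated by the active MetaData naming convention.
        op.create_index(op.f('ix_files_content_hash'), 'files',
                        ['content_hash'], unique=False)
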
revision = '3793e361a752' down_revision = '080349e895d7' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('source_components', + op.create_table( + 'source_components', sa.Column('name', sa.String(), nullable=False), sa.Column('value', sa.Binary(), nullable=False), sa.Column('description', sa.Text(), nullable=True), sa.Column('username', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('name', name=op.f('pk_source_components'))) + sa.PrimaryKeyConstraint('name', name=op.f('pk_source_components')) + ) def downgrade(): diff --git a/web/server/codechecker_server/migrations/report/versions/39f9e96071c0_analyzer_statistics.py b/web/server/codechecker_server/migrations/report/versions/39f9e96071c0_analyzer_statistics.py index 5d85dfe3e4..d7e4617379 100644 --- a/web/server/codechecker_server/migrations/report/versions/39f9e96071c0_analyzer_statistics.py +++ b/web/server/codechecker_server/migrations/report/versions/39f9e96071c0_analyzer_statistics.py @@ -1,20 +1,21 @@ -"""Analyzer statistics +""" +Analyzer statistics Revision ID: 39f9e96071c0 -Revises: 9987aa593ca7 +Revises: 9987aa593ca7 Create Date: 2018-09-05 17:45:10.746758 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '39f9e96071c0' down_revision = '9987aa593ca7' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.create_table('analyzer_statistics', @@ -25,13 +26,15 @@ def upgrade(): sa.Column('successful', sa.Integer(), nullable=True), sa.Column('failed', sa.Integer(), nullable=True), sa.Column('failed_files', sa.Binary(), nullable=True), - sa.ForeignKeyConstraint(['run_history_id'], - ['run_histories.id'], - name=op.f('fk_analyzer_statistics_run_history_id_run_histories'), - ondelete='CASCADE', - initially='DEFERRED', - deferrable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_analyzer_statistics')) + sa.ForeignKeyConstraint( + ['run_history_id'], ['run_histories.id'], + name=op.f('fk_analyzer_statistics_run_history_id_' + 'run_histories'), + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True), + sa.PrimaryKeyConstraint( + 'id', name=op.f('pk_analyzer_statistics')) ) op.create_index(op.f('ix_analyzer_statistics_run_history_id'), 'analyzer_statistics', diff --git a/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py b/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py index bb3680343b..ea85cc35b5 100644 --- a/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py +++ b/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py @@ -1,19 +1,21 @@ -"""Off and unavailable detection statuses +""" +'off' and 'unavailable' detection statuses Revision ID: 3e91d0612422 -Revises: 40112fd406e3 +Revises: 40112fd406e3 Create Date: 2018-12-19 11:16:57.107510 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
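
The foreign keys above are declared deferrable=True, initially='DEFERRED', so
on back-ends that support it (e.g., PostgreSQL) the checks only run at COMMIT,
letting one transaction insert runs, histories, and statistics in any order.
The declarative equivalent, as a sketch:

    import sqlalchemy as sa

    metadata = sa.MetaData()
    analyzer_statistics = sa.Table(
        'analyzer_statistics', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('run_history_id', sa.Integer),
        # Enforcement of this constraint is postponed until COMMIT.
        sa.ForeignKeyConstraint(
            ['run_history_id'], ['run_histories.id'],
            ondelete='CASCADE', initially='DEFERRED', deferrable=True))
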
revision = '3e91d0612422' down_revision = '40112fd406e3' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa table_name = 'reports' name = 'detection_status' @@ -46,7 +48,9 @@ def upgrade(): elif dialect == 'sqlite': op.execute('PRAGMA foreign_keys=off') with op.batch_alter_table('reports') as batch_op: - batch_op.alter_column(name, existing_type=old_type, type_=new_type) + batch_op.alter_column(name, + existing_type=old_type, + type_=new_type) op.execute('PRAGMA foreign_keys=on') @@ -70,5 +74,7 @@ def downgrade(): elif dialect == 'sqlite': op.execute('PRAGMA foreign_keys=off') with op.batch_alter_table('reports') as batch_op: - batch_op.alter_column(name, existing_type=new_type, type_=old_type) + batch_op.alter_column(name, + existing_type=new_type, + type_=old_type) op.execute('PRAGMA foreign_keys=on') diff --git a/web/server/codechecker_server/migrations/report/versions/4b38fa14c27b_file_id_index_for_reports.py b/web/server/codechecker_server/migrations/report/versions/4b38fa14c27b_file_id_index_for_reports.py index 593f68431c..8826dafeb2 100644 --- a/web/server/codechecker_server/migrations/report/versions/4b38fa14c27b_file_id_index_for_reports.py +++ b/web/server/codechecker_server/migrations/report/versions/4b38fa14c27b_file_id_index_for_reports.py @@ -1,27 +1,28 @@ -"""file id index for reports +""" +file_id INDEX for reports Revision ID: 4b38fa14c27b -Revises: 82ca43f05c10 +Revises: 82ca43f05c10 Create Date: 2017-12-11 09:13:16.301478 -Add index for the file ids in the report table to speed up -file cleanup. +Add INDEX for the file_id column in the report table to speed up file cleanup. """ +from alembic import op - -# revision identifiers, used by Alembic. +# Revision identifiers, used by Alembic. revision = '4b38fa14c27b' down_revision = '82ca43f05c10' branch_labels = None depends_on = None -from alembic import op def upgrade(): - op.create_index(op.f('ix_reports_file_id'), 'reports', ['file_id'], unique=False) + op.create_index(op.f('ix_reports_file_id'), 'reports', ['file_id'], + unique=False) + def downgrade(): op.drop_index(op.f('ix_reports_file_id'), table_name='reports') diff --git a/web/server/codechecker_server/migrations/report/versions/5f8a443a51e5_add_description_for_run_history.py b/web/server/codechecker_server/migrations/report/versions/5f8a443a51e5_add_description_for_run_history.py index 2ab987f354..d68342994e 100644 --- a/web/server/codechecker_server/migrations/report/versions/5f8a443a51e5_add_description_for_run_history.py +++ b/web/server/codechecker_server/migrations/report/versions/5f8a443a51e5_add_description_for_run_history.py @@ -1,20 +1,21 @@ -"""Add description for run history +""" +Add description for run_history Revision ID: 5f8a443a51e5 -Revises: a79677f54e48 +Revises: a79677f54e48 Create Date: 2020-04-09 09:52:52.336709 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. 
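
The detection-status migration above must branch on the dialect because
SQLite's ALTER TABLE cannot change a column's type in place; Alembic's batch
mode rebuilds the table by copying instead. The shape of that pattern, with a
hypothetical target type:

    from alembic import op
    import sqlalchemy as sa

    def upgrade():
        dialect = op.get_bind().dialect.name
        if dialect == 'postgresql':
            # A direct ALTER works here.
            op.alter_column('reports', 'detection_status',
                            type_=sa.String())
        elif dialect == 'sqlite':
            op.execute('PRAGMA foreign_keys=off')
            # Batch mode copies 'reports' into a rebuilt table with the
            # changed column, then swaps it in.
            with op.batch_alter_table('reports') as batch_op:
                batch_op.alter_column('detection_status',
                                      type_=sa.String())
            op.execute('PRAGMA foreign_keys=on')
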
revision = '5f8a443a51e5' down_revision = 'a79677f54e48' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('run_histories', diff --git a/web/server/codechecker_server/migrations/report/versions/6cb6a3a41967_system_comments.py b/web/server/codechecker_server/migrations/report/versions/6cb6a3a41967_system_comments.py index 80837819bb..378482dba9 100644 --- a/web/server/codechecker_server/migrations/report/versions/6cb6a3a41967_system_comments.py +++ b/web/server/codechecker_server/migrations/report/versions/6cb6a3a41967_system_comments.py @@ -1,20 +1,21 @@ -"""System comments +""" +System comments Revision ID: 6cb6a3a41967 -Revises: 3e91d0612422 +Revises: 3e91d0612422 Create Date: 2019-04-02 16:12:46.794131 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '6cb6a3a41967' down_revision = '3e91d0612422' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('comments', sa.Column('kind', diff --git a/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py b/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py index 3254bf5c46..0c8730af42 100644 --- a/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py +++ b/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py @@ -1,27 +1,29 @@ -"""Review status for each report +""" +Review status for each report Revision ID: 75ae226b5d88 -Revises: fb356f0eefed +Revises: fb356f0eefed Create Date: 2022-01-27 15:19:48.185835 - """ -# revision identifiers, used by Alembic. -revision = '75ae226b5d88' -down_revision = 'fb356f0eefed' -branch_labels = None -depends_on = None - from collections import defaultdict +from io import StringIO +import zlib + from alembic import op import sqlalchemy as sa -import zlib -from io import StringIO from codechecker_common.source_code_comment_handler import \ SourceCodeCommentHandler, SpellException +# Revision identifiers, used by Alembic. +revision = '75ae226b5d88' +down_revision = 'fb356f0eefed' +branch_labels = None +depends_on = None + + def upgrade(): def decode_file_content(content): return zlib.decompress(content).decode('utf-8', errors='ignore') @@ -63,8 +65,13 @@ def decode_file_content(content): # branching here needs to stay. 
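        # The row-value form "SET (c1, c2, ...) = (SELECT ...)" used below
        # fills all four review-status fields with one statement; the
        # PostgreSQL path further down adds the columns via op.add_column
        # instead.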
conn.execute(""" UPDATE reports - SET (review_status, review_status_author, review_status_date, review_status_message) = - (SELECT status, author, date, message FROM review_statuses WHERE bug_hash = reports.bug_id) + SET (review_status, + review_status_author, + review_status_date, + review_status_message) = + (SELECT status, author, date, message + FROM review_statuses + WHERE bug_hash = reports.bug_id) """) elif dialect == 'postgresql': op.add_column('reports', col_rs) diff --git a/web/server/codechecker_server/migrations/report/versions/82ca43f05c10_initial_schema.py b/web/server/codechecker_server/migrations/report/versions/82ca43f05c10_initial_schema.py index 70c3b39a2e..19a8e3fbd3 100644 --- a/web/server/codechecker_server/migrations/report/versions/82ca43f05c10_initial_schema.py +++ b/web/server/codechecker_server/migrations/report/versions/82ca43f05c10_initial_schema.py @@ -1,145 +1,213 @@ -"""Initial schema +""" +Initial schema Revision ID: 82ca43f05c10 -Revises: +Revises: Create Date: 2017-09-18 21:00:11.593693 - """ +from alembic import op +import sqlalchemy as sa - -# revision identifiers, used by Alembic. +# Revision identifiers, used by Alembic. revision = '82ca43f05c10' down_revision = None branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('comments', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('bug_hash', sa.String(), nullable=False), - sa.Column('author', sa.String(), nullable=False), - sa.Column('message', sa.Binary(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_comments')) + op.create_table( + 'comments', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('bug_hash', sa.String(), nullable=False), + sa.Column('author', sa.String(), nullable=False), + sa.Column('message', sa.Binary(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_comments')) ) - op.create_index(op.f('ix_comments_bug_hash'), 'comments', ['bug_hash'], unique=False) - op.create_table('db_version', - sa.Column('major', sa.Integer(), nullable=False), - sa.Column('minor', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('major', 'minor', name=op.f('pk_db_version')) + op.create_index(op.f('ix_comments_bug_hash'), 'comments', ['bug_hash'], + unique=False) + + op.create_table( + 'db_version', + sa.Column('major', sa.Integer(), nullable=False), + sa.Column('minor', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('major', 'minor', name=op.f('pk_db_version')) ) - op.create_table('file_contents', - sa.Column('content_hash', sa.String(), nullable=False), - sa.Column('content', sa.Binary(), nullable=True), - sa.PrimaryKeyConstraint('content_hash', name=op.f('pk_file_contents')) + + op.create_table( + 'file_contents', + sa.Column('content_hash', sa.String(), nullable=False), + sa.Column('content', sa.Binary(), nullable=True), + sa.PrimaryKeyConstraint('content_hash', name=op.f('pk_file_contents')) ) - op.create_table('review_statuses', - sa.Column('bug_hash', sa.String(), nullable=False), - sa.Column('status', sa.Enum('unreviewed', 'confirmed', 'false_positive', 'intentional', name='review_status'), nullable=False), - sa.Column('author', sa.String(), nullable=False), - sa.Column('message', sa.Binary(), nullable=False), - sa.Column('date', sa.DateTime(), nullable=False), - 
sa.PrimaryKeyConstraint('bug_hash', name=op.f('pk_review_statuses')) + + op.create_table( + 'review_statuses', + sa.Column('bug_hash', sa.String(), nullable=False), + sa.Column('status', sa.Enum('unreviewed', + 'confirmed', + 'false_positive', + 'intentional', + name='review_status'), + nullable=False), + sa.Column('author', sa.String(), nullable=False), + sa.Column('message', sa.Binary(), nullable=False), + sa.Column('date', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('bug_hash', name=op.f('pk_review_statuses')) ) - op.create_table('runs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('date', sa.DateTime(), nullable=True), - sa.Column('duration', sa.Integer(), nullable=True), - sa.Column('name', sa.String(), nullable=True), - sa.Column('version', sa.String(), nullable=True), - sa.Column('command', sa.String(), nullable=True), - sa.Column('can_delete', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_runs')), - sa.UniqueConstraint('name', name=op.f('uq_runs_name')) + + op.create_table( + 'runs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('date', sa.DateTime(), nullable=True), + sa.Column('duration', sa.Integer(), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column('version', sa.String(), nullable=True), + sa.Column('command', sa.String(), nullable=True), + sa.Column('can_delete', sa.Boolean(), server_default=sa.text('true'), + nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_runs')), + sa.UniqueConstraint('name', name=op.f('uq_runs_name')) ) - op.create_table('files', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('filepath', sa.String(), nullable=False), - sa.Column('filename', sa.String(), nullable=False), - sa.Column('content_hash', sa.String(), nullable=True), - sa.ForeignKeyConstraint(['content_hash'], ['file_contents.content_hash'], name=op.f('fk_files_content_hash_file_contents'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_files')), - sa.UniqueConstraint('filepath', 'content_hash', name=op.f('uq_files_filepath')) + + op.create_table( + 'files', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('filepath', sa.String(), nullable=False), + sa.Column('filename', sa.String(), nullable=False), + sa.Column('content_hash', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['content_hash'], + ['file_contents.content_hash'], + name=op.f( + 'fk_files_content_hash_file_contents'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_files')), + sa.UniqueConstraint('filepath', 'content_hash', + name=op.f('uq_files_filepath')) ) - op.create_table('run_histories', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('run_id', sa.Integer(), nullable=True), - sa.Column('version_tag', sa.String(), nullable=True), - sa.Column('user', sa.String(), nullable=False), - sa.Column('time', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['run_id'], ['runs.id'], name=op.f('fk_run_histories_run_id_runs'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_run_histories')), - sa.UniqueConstraint('run_id', 'version_tag', name=op.f('uq_run_histories_run_id')) + + op.create_table( + 'run_histories', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('run_id', sa.Integer(), nullable=True), + sa.Column('version_tag', sa.String(), 
nullable=True), + sa.Column('user', sa.String(), nullable=False), + sa.Column('time', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['run_id'], ['runs.id'], + name=op.f('fk_run_histories_run_id_runs'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_run_histories')), + sa.UniqueConstraint('run_id', 'version_tag', + name=op.f('uq_run_histories_run_id')) ) - op.create_index(op.f('ix_run_histories_run_id'), 'run_histories', ['run_id'], unique=False) - op.create_table('reports', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('file_id', sa.Integer(), nullable=True), - sa.Column('run_id', sa.Integer(), nullable=True), - sa.Column('bug_id', sa.String(), nullable=True), - sa.Column('checker_id', sa.String(), nullable=True), - sa.Column('checker_cat', sa.String(), nullable=True), - sa.Column('bug_type', sa.String(), nullable=True), - sa.Column('severity', sa.Integer(), nullable=True), - sa.Column('line', sa.Integer(), nullable=True), - sa.Column('column', sa.Integer(), nullable=True), - sa.Column('checker_message', sa.String(), nullable=True), - sa.Column('detection_status', sa.Enum('new', 'unresolved', 'resolved', 'reopened', name='detection_status'), nullable=True), - sa.Column('detected_at', sa.DateTime(), nullable=False), - sa.Column('fixed_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['file_id'], ['files.id'], name=op.f('fk_reports_file_id_files'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.ForeignKeyConstraint(['run_id'], ['runs.id'], name=op.f('fk_reports_run_id_runs'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.PrimaryKeyConstraint('id', name=op.f('pk_reports')) + op.create_index(op.f('ix_run_histories_run_id'), 'run_histories', + ['run_id'], unique=False) + + op.create_table( + 'reports', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('file_id', sa.Integer(), nullable=True), + sa.Column('run_id', sa.Integer(), nullable=True), + sa.Column('bug_id', sa.String(), nullable=True), + sa.Column('checker_id', sa.String(), nullable=True), + sa.Column('checker_cat', sa.String(), nullable=True), + sa.Column('bug_type', sa.String(), nullable=True), + sa.Column('severity', sa.Integer(), nullable=True), + sa.Column('line', sa.Integer(), nullable=True), + sa.Column('column', sa.Integer(), nullable=True), + sa.Column('checker_message', sa.String(), nullable=True), + sa.Column('detection_status', sa.Enum('new', + 'unresolved', + 'resolved', + 'reopened', + name='detection_status'), + nullable=True), + sa.Column('detected_at', sa.DateTime(), nullable=False), + sa.Column('fixed_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['file_id'], ['files.id'], + name=op.f('fk_reports_file_id_files'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.ForeignKeyConstraint(['run_id'], ['runs.id'], + name=op.f('fk_reports_run_id_runs'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_reports')) ) - op.create_index(op.f('ix_reports_bug_id'), 'reports', ['bug_id'], unique=False) - op.create_index(op.f('ix_reports_run_id'), 'reports', ['run_id'], unique=False) - op.create_table('bug_path_events', - sa.Column('line_begin', sa.Integer(), nullable=True), - sa.Column('col_begin', sa.Integer(), nullable=True), - sa.Column('line_end', sa.Integer(), nullable=True), - sa.Column('col_end', sa.Integer(), nullable=True), - sa.Column('order', sa.Integer(), nullable=False), - 
sa.Column('msg', sa.String(), nullable=True), - sa.Column('file_id', sa.Integer(), nullable=True), - sa.Column('report_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['file_id'], ['files.id'], name=op.f('fk_bug_path_events_file_id_files'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.ForeignKeyConstraint(['report_id'], ['reports.id'], name=op.f('fk_bug_path_events_report_id_reports'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.PrimaryKeyConstraint('order', 'report_id', name=op.f('pk_bug_path_events')) + op.create_index(op.f('ix_reports_bug_id'), 'reports', ['bug_id'], + unique=False) + op.create_index(op.f('ix_reports_run_id'), 'reports', ['run_id'], + unique=False) + + op.create_table( + 'bug_path_events', + sa.Column('line_begin', sa.Integer(), nullable=True), + sa.Column('col_begin', sa.Integer(), nullable=True), + sa.Column('line_end', sa.Integer(), nullable=True), + sa.Column('col_end', sa.Integer(), nullable=True), + sa.Column('order', sa.Integer(), nullable=False), + sa.Column('msg', sa.String(), nullable=True), + sa.Column('file_id', sa.Integer(), nullable=True), + sa.Column('report_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['file_id'], ['files.id'], + name=op.f('fk_bug_path_events_file_id_files'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.ForeignKeyConstraint(['report_id'], ['reports.id'], + name=op.f( + 'fk_bug_path_events_report_id_reports'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.PrimaryKeyConstraint('order', 'report_id', + name=op.f('pk_bug_path_events')) ) - op.create_index(op.f('ix_bug_path_events_file_id'), 'bug_path_events', ['file_id'], unique=False) - op.create_table('bug_report_points', - sa.Column('line_begin', sa.Integer(), nullable=True), - sa.Column('col_begin', sa.Integer(), nullable=True), - sa.Column('line_end', sa.Integer(), nullable=True), - sa.Column('col_end', sa.Integer(), nullable=True), - sa.Column('order', sa.Integer(), nullable=False), - sa.Column('file_id', sa.Integer(), nullable=True), - sa.Column('report_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['file_id'], ['files.id'], name=op.f('fk_bug_report_points_file_id_files'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.ForeignKeyConstraint(['report_id'], ['reports.id'], name=op.f('fk_bug_report_points_report_id_reports'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), - sa.PrimaryKeyConstraint('order', 'report_id', name=op.f('pk_bug_report_points')) + op.create_index(op.f('ix_bug_path_events_file_id'), 'bug_path_events', + ['file_id'], unique=False) + + op.create_table( + 'bug_report_points', + sa.Column('line_begin', sa.Integer(), nullable=True), + sa.Column('col_begin', sa.Integer(), nullable=True), + sa.Column('line_end', sa.Integer(), nullable=True), + sa.Column('col_end', sa.Integer(), nullable=True), + sa.Column('order', sa.Integer(), nullable=False), + sa.Column('file_id', sa.Integer(), nullable=True), + sa.Column('report_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['file_id'], ['files.id'], + name=op.f( + 'fk_bug_report_points_file_id_files'), + ondelete='CASCADE', initially='DEFERRED', + deferrable=True), + sa.ForeignKeyConstraint( + ['report_id'], ['reports.id'], + name=op.f('fk_bug_report_points_report_id_reports'), + ondelete='CASCADE', initially='DEFERRED', deferrable=True), + sa.PrimaryKeyConstraint('order', 'report_id', + name=op.f('pk_bug_report_points')) ) - 
op.create_index(op.f('ix_bug_report_points_file_id'), 'bug_report_points', ['file_id'], unique=False) - # ### end Alembic commands ### + op.create_index(op.f('ix_bug_report_points_file_id'), 'bug_report_points', + ['file_id'], unique=False) def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f('ix_bug_report_points_file_id'), table_name='bug_report_points') + op.drop_index(op.f('ix_bug_report_points_file_id'), + table_name='bug_report_points') op.drop_table('bug_report_points') - op.drop_index(op.f('ix_bug_path_events_file_id'), table_name='bug_path_events') + op.drop_index(op.f('ix_bug_path_events_file_id'), + table_name='bug_path_events') op.drop_table('bug_path_events') - op.drop_index(op.f('ix_reports_run_id'), table_name='reports') - op.drop_index(op.f('ix_reports_bug_id'), table_name='reports') + op.drop_index(op.f('ix_reports_run_id'), + table_name='reports') + op.drop_index(op.f('ix_reports_bug_id'), + table_name='reports') op.drop_table('reports') - op.drop_index(op.f('ix_run_histories_run_id'), table_name='run_histories') + op.drop_index(op.f('ix_run_histories_run_id'), + table_name='run_histories') op.drop_table('run_histories') op.drop_table('files') op.drop_table('runs') @@ -148,4 +216,3 @@ def downgrade(): op.drop_table('db_version') op.drop_index(op.f('ix_comments_bug_hash'), table_name='comments') op.drop_table('comments') - # ### end Alembic commands ### diff --git a/web/server/codechecker_server/migrations/report/versions/9987aa593ca7_add_codechecker_version_to_run_history.py b/web/server/codechecker_server/migrations/report/versions/9987aa593ca7_add_codechecker_version_to_run_history.py index 5e3764acdc..478f1b9074 100644 --- a/web/server/codechecker_server/migrations/report/versions/9987aa593ca7_add_codechecker_version_to_run_history.py +++ b/web/server/codechecker_server/migrations/report/versions/9987aa593ca7_add_codechecker_version_to_run_history.py @@ -1,20 +1,21 @@ -"""Add CodeChecker version to run history +""" +Add CodeChecker version to run_history Revision ID: 9987aa593ca7 -Revises: e89887e7d3f0 +Revises: e89887e7d3f0 Create Date: 2018-09-05 17:43:42.099167 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '9987aa593ca7' down_revision = 'e89887e7d3f0' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('run_histories', diff --git a/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py b/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py index 85dc5dc376..4686659d7d 100644 --- a/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py +++ b/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py @@ -1,32 +1,39 @@ -"""Report annotations +""" +Report annotations Revision ID: 9d956a0fae8d -Revises: 75ae226b5d88 +Revises: 75ae226b5d88 Create Date: 2023-02-09 17:45:56.162040 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + +# Revision identifiers, used by Alembic. 
revision = '9d956a0fae8d' down_revision = '75ae226b5d88' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('report_annotations', - sa.Column('report_id', sa.Integer(), nullable=False), - sa.Column('key', sa.String(), nullable=False), - sa.Column('value', sa.String(), nullable=False), - sa.ForeignKeyConstraint(['report_id'], ['reports.id'], name=op.f('fk_report_annotations_report_id_reports'), ondelete='CASCADE'), - sa.PrimaryKeyConstraint('report_id', 'key', name=op.f('pk_report_annotations')) + op.create_table( + 'report_annotations', + sa.Column('report_id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(), nullable=False), + sa.Column('value', sa.String(), nullable=False), + sa.ForeignKeyConstraint( + ['report_id'], ['reports.id'], + name=op.f('fk_report_annotations_report_id_reports'), + ondelete='CASCADE'), + sa.PrimaryKeyConstraint('report_id', 'key', + name=op.f('pk_report_annotations')) ) - op.create_index(op.f('ix_report_annotations_report_id'), 'report_annotations', ['report_id'], unique=False) + op.create_index(op.f('ix_report_annotations_report_id'), + 'report_annotations', ['report_id'], unique=False) def downgrade(): - op.drop_index(op.f('ix_report_annotations_report_id'), table_name='report_annotations') + op.drop_index(op.f('ix_report_annotations_report_id'), + table_name='report_annotations') op.drop_table('report_annotations') diff --git a/web/server/codechecker_server/migrations/report/versions/__init__.py b/web/server/codechecker_server/migrations/report/versions/__init__.py index e69de29bb2..4259749345 100644 --- a/web/server/codechecker_server/migrations/report/versions/__init__.py +++ b/web/server/codechecker_server/migrations/report/versions/__init__.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# +# Part of the CodeChecker project, under the Apache License v2.0 with +# LLVM Exceptions. See LICENSE for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +# +# ------------------------------------------------------------------------- diff --git a/web/server/codechecker_server/migrations/report/versions/a24461972d2e_add_index_for_report_and_history_id_columns.py b/web/server/codechecker_server/migrations/report/versions/a24461972d2e_add_index_for_report_and_history_id_columns.py index 6b72e10318..2c8b9dad03 100644 --- a/web/server/codechecker_server/migrations/report/versions/a24461972d2e_add_index_for_report_and_history_id_columns.py +++ b/web/server/codechecker_server/migrations/report/versions/a24461972d2e_add_index_for_report_and_history_id_columns.py @@ -1,19 +1,20 @@ -"""Add index for report and history id columns +""" +Add index for report and history id columns Revision ID: a24461972d2e -Revises: dabc6998b8f0 +Revises: dabc6998b8f0 Create Date: 2021-06-10 15:38:59.504534 - """ -# revision identifiers, used by Alembic. +from alembic import op + + +# Revision identifiers, used by Alembic. 
revision = 'a24461972d2e' down_revision = 'dabc6998b8f0' branch_labels = None depends_on = None -from alembic import op - def upgrade(): op.create_index( diff --git a/web/server/codechecker_server/migrations/report/versions/a79677f54e48_remove_db_version_table.py b/web/server/codechecker_server/migrations/report/versions/a79677f54e48_remove_db_version_table.py index 3a15a2d654..9ed7c6e97f 100644 --- a/web/server/codechecker_server/migrations/report/versions/a79677f54e48_remove_db_version_table.py +++ b/web/server/codechecker_server/migrations/report/versions/a79677f54e48_remove_db_version_table.py @@ -1,20 +1,21 @@ -"""Remove db version table +""" +Remove db_version table Revision ID: a79677f54e48 -Revises: 6cb6a3a41967 +Revises: 6cb6a3a41967 Create Date: 2020-03-13 12:14:19.805990 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = 'a79677f54e48' down_revision = '6cb6a3a41967' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.drop_table('db_version') diff --git a/web/server/codechecker_server/migrations/report/versions/ad2a567e513a_git_blame_info.py b/web/server/codechecker_server/migrations/report/versions/ad2a567e513a_git_blame_info.py index ee6aeb8d41..f9b2ab2cae 100644 --- a/web/server/codechecker_server/migrations/report/versions/ad2a567e513a_git_blame_info.py +++ b/web/server/codechecker_server/migrations/report/versions/ad2a567e513a_git_blame_info.py @@ -1,20 +1,21 @@ -"""Git blame info +""" +Git blame info Revision ID: ad2a567e513a -Revises: f8291ab1d6be +Revises: f8291ab1d6be Create Date: 2020-12-17 18:08:50.322381 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = 'ad2a567e513a' down_revision = 'f8291ab1d6be' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('files', diff --git a/web/server/codechecker_server/migrations/report/versions/af5d8a21c1e4_add_analyzer_name_for_report.py b/web/server/codechecker_server/migrations/report/versions/af5d8a21c1e4_add_analyzer_name_for_report.py index 026e859b92..8539b91a51 100644 --- a/web/server/codechecker_server/migrations/report/versions/af5d8a21c1e4_add_analyzer_name_for_report.py +++ b/web/server/codechecker_server/migrations/report/versions/af5d8a21c1e4_add_analyzer_name_for_report.py @@ -1,20 +1,20 @@ -"""Add analyzer name for report +""" +Add analyzer_name for report Revision ID: af5d8a21c1e4 -Revises: 2185167f8568 +Revises: 2185167f8568 Create Date: 2020-08-14 11:26:01.806877 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + +# Revision identifiers, used by Alembic. 
revision = 'af5d8a21c1e4' down_revision = '2185167f8568' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.add_column('reports', sa.Column('analyzer_name', @@ -22,5 +22,6 @@ def upgrade(): server_default='unknown', nullable=False)) + def downgrade(): op.drop_column('reports', 'analyzer_name') diff --git a/web/server/codechecker_server/migrations/report/versions/c042e02cca99_extended_report_data.py b/web/server/codechecker_server/migrations/report/versions/c042e02cca99_extended_report_data.py index 061a34ac3c..c2125baed8 100644 --- a/web/server/codechecker_server/migrations/report/versions/c042e02cca99_extended_report_data.py +++ b/web/server/codechecker_server/migrations/report/versions/c042e02cca99_extended_report_data.py @@ -1,20 +1,21 @@ -"""Extended report data +""" +Extended report data Revision ID: 40112fd406e3 -Revises: 39f9e96071c0 +Revises: 39f9e96071c0 Create Date: 2018-11-15 11:06:36.318406 - """ -# revision identifiers, used by Alembic. +from alembic import op +import sqlalchemy as sa + + +# Revision identifiers, used by Alembic. revision = '40112fd406e3' down_revision = '39f9e96071c0' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): op.create_table('extended_report_data', @@ -31,18 +32,24 @@ def upgrade(): sa.Column('line_end', sa.Integer(), nullable=True), sa.Column('col_end', sa.Integer(), nullable=True), sa.Column('message', sa.String(), nullable=True), - sa.ForeignKeyConstraint(['file_id'], ['files.id'], + sa.ForeignKeyConstraint( + ['file_id'], ['files.id'], name=op.f('fk_extended_report_data_file_id_files'), ondelete='CASCADE', initially='DEFERRED', - deferrable=True), - sa.ForeignKeyConstraint(['report_id'], ['reports.id'], + deferrable=True + ), + sa.ForeignKeyConstraint( + ['report_id'], ['reports.id'], name=op.f('fk_extended_report_data_report_id_reports'), ondelete='CASCADE', initially='DEFERRED', - deferrable=True), - sa.PrimaryKeyConstraint('id', - name=op.f('pk_extended_report_data'))) + deferrable=True + ), + sa.PrimaryKeyConstraint( + 'id', + name=op.f('pk_extended_report_data')) + ) op.create_index(op.f('ix_extended_report_data_file_id'), 'extended_report_data', ['file_id'], unique=False) diff --git a/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py b/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py index 905614ebe2..0c1da27454 100644 --- a/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py +++ b/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py @@ -1,18 +1,10 @@ -"""Store information about enabled and disabled checkers for a run +""" +Store information about enabled and disabled checkers for a run. Revision ID: c3dad71f8e6b -Revises: 9d956a0fae8d +Revises: 9d956a0fae8d Create Date: 2023-10-20 14:11:48.371981 - """ - -# revision identifiers, used by Alembic. -revision = 'c3dad71f8e6b' -down_revision = '9d956a0fae8d' -branch_labels = None -depends_on = None - - from logging import getLogger from typing import Dict, Tuple @@ -25,6 +17,12 @@ from codechecker_report_converter.report import FakeChecker, UnknownChecker +# Revision identifiers, used by Alembic. 
+revision = 'c3dad71f8e6b' +down_revision = '9d956a0fae8d' +branch_labels = None +depends_on = None + REPORT_UPDATE_CHUNK_SIZE = 1_000_000 @@ -33,7 +31,7 @@ def upgrade(): # uses the facilities that are sourced from the Alembic env.py. # Symbols created on the module-level are created *before* Alembic's env.py # had loaded. - LOG = getLogger("migration") + LOG = getLogger("migration/report") dialect = op.get_context().dialect.name conn = op.get_bind() @@ -233,38 +231,38 @@ def _print_progress(batch: int): db.execute(f""" UPDATE reports SET - checker_id = ( - SELECT checkers.id - FROM checkers - WHERE checkers.analyzer_name = reports.analyzer_name - AND checkers.checker_name = reports.checker_name - ) + checker_id = ( + SELECT checkers.id + FROM checkers + WHERE checkers.analyzer_name = reports.analyzer_name + AND checkers.checker_name = reports.checker_name + ) WHERE reports.id IN ( - SELECT reports.id - FROM reports - WHERE reports.checker_id = {fake_chk_id} - AND reports.analyzer_name != '{FakeChecker[0]}' - AND reports.checker_name != '{FakeChecker[1]}' - LIMIT {REPORT_UPDATE_CHUNK_SIZE} - ) + SELECT reports.id + FROM reports + WHERE reports.checker_id = {fake_chk_id} + AND reports.analyzer_name != '{FakeChecker[0]}' + AND reports.checker_name != '{FakeChecker[1]}' + LIMIT {REPORT_UPDATE_CHUNK_SIZE} + ) ; """) else: db.execute(f""" UPDATE reports SET - checker_id = checkers.id + checker_id = checkers.id FROM checkers WHERE checkers.analyzer_name = reports.analyzer_name - AND checkers.checker_name = reports.checker_name - AND reports.id IN ( - SELECT reports.id - FROM reports - WHERE reports.checker_id = {fake_chk_id} - AND reports.analyzer_name != '{FakeChecker[0]}' - AND reports.checker_name != '{FakeChecker[1]}' - LIMIT {REPORT_UPDATE_CHUNK_SIZE} - ) + AND checkers.checker_name = reports.checker_name + AND reports.id IN ( + SELECT reports.id + FROM reports + WHERE reports.checker_id = {fake_chk_id} + AND reports.analyzer_name != '{FakeChecker[0]}' + AND reports.checker_name != '{FakeChecker[1]}' + LIMIT {REPORT_UPDATE_CHUNK_SIZE} + ) ; """) _print_progress(i + 1) @@ -399,7 +397,7 @@ def upgrade_reports_table_constraints(): def downgrade(): - LOG = getLogger("migration") + LOG = getLogger("migration/report") dialect = op.get_context().dialect.name conn = op.get_bind() @@ -505,17 +503,17 @@ def _print_progress(batch: int): db.execute(f""" UPDATE reports SET - (analyzer_name, checker_id, severity, - checker_id_lookup) = + (analyzer_name, checker_id, severity, + checker_id_lookup) = (SELECT analyzer_name, checker_name, severity, '{fake_chk_id}' - FROM checkers - WHERE checkers.id = reports.checker_id_lookup) + FROM checkers + WHERE checkers.id = reports.checker_id_lookup) WHERE reports.id IN ( - SELECT reports.id - FROM reports - WHERE reports.checker_id_lookup != {fake_chk_id} - LIMIT {REPORT_UPDATE_CHUNK_SIZE} + SELECT reports.id + FROM reports + WHERE reports.checker_id_lookup != {fake_chk_id} + LIMIT {REPORT_UPDATE_CHUNK_SIZE} ) ; """) diff --git a/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py b/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py index 80ad54ef22..4023c1d86d 100644 --- a/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py +++ b/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py @@ -1,56 +1,62 @@ -"""Analysis info table +""" +Analysis info table Revision ID: dabc6998b8f0 -Revises: af5d8a21c1e4 
+Revises: af5d8a21c1e4 Create Date: 2021-05-13 12:05:55.983746 - """ +from logging import getLogger + +from alembic import op +import sqlalchemy as sa -# revision identifiers, used by Alembic. + +# Revision identifiers, used by Alembic. revision = 'dabc6998b8f0' down_revision = 'af5d8a21c1e4' branch_labels = None depends_on = None -from logging import getLogger - -from alembic import op -import sqlalchemy as sa - def upgrade(): - LOG = getLogger("migration") + LOG = getLogger("migration/report") conn = op.get_bind() ctx = op.get_context() dialect = ctx.dialect.name - analysis_info_tbl = op.create_table('analysis_info', + analysis_info_tbl = op.create_table( + 'analysis_info', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('analyzer_command', sa.Binary(), nullable=True), sa.PrimaryKeyConstraint('id', name=op.f('pk_analysis_info')) ) - run_history_analysis_info_tbl = op.create_table('run_history_analysis_info', + run_history_analysis_info_tbl = op.create_table( + 'run_history_analysis_info', sa.Column('run_history_id', sa.Integer(), nullable=True), sa.Column('analysis_info_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['analysis_info_id'], ['analysis_info.id'], - name=op.f('fk_run_history_analysis_info_analysis_info_id_analysis_info')), + name=op.f('fk_run_history_analysis_info_analysis_info_id_' + 'analysis_info')), sa.ForeignKeyConstraint( ['run_history_id'], ['run_histories.id'], - name=op.f('fk_run_history_analysis_info_run_history_id_run_histories'), + name=op.f('fk_run_history_analysis_info_run_history_id_' + 'run_histories'), ondelete='CASCADE', initially='DEFERRED', deferrable=True) ) - report_analysis_info_tbl = op.create_table('report_analysis_info', + report_analysis_info_tbl = op.create_table( + 'report_analysis_info', sa.Column('report_id', sa.Integer(), nullable=True), sa.Column('analysis_info_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint( ['analysis_info_id'], ['analysis_info.id'], - name=op.f('fk_report_analysis_info_analysis_info_id_analysis_info')), + name=op.f('fk_report_analysis_info_analysis_info_id_' + 'analysis_info')), sa.ForeignKeyConstraint( ['report_id'], ['reports.id'], @@ -69,7 +75,8 @@ def upgrade(): run_analysis_info = {} analysis_info = [] run_history_analysis_info = [] - for ai_id, (run_history_id, run_id, analyzer_cmd) in enumerate(run_histories, start=1): + for ai_id, (run_history_id, run_id, analyzer_cmd) in \ + enumerate(run_histories, start=1): if analyzer_cmd not in uniqued_analysis_info: uniqued_analysis_info[analyzer_cmd] = ai_id analysis_info.append({ @@ -78,7 +85,8 @@ def upgrade(): }) if run_id not in run_analysis_info: - run_analysis_info[run_id] = uniqued_analysis_info[analyzer_cmd] + run_analysis_info[run_id] = \ + uniqued_analysis_info[analyzer_cmd] run_history_analysis_info.append({ 'run_history_id': run_history_id, @@ -90,7 +98,7 @@ def upgrade(): op.bulk_insert( run_history_analysis_info_tbl, run_history_analysis_info) - except: + except Exception: LOG.error("Analyzer command data migration failed!") else: # If data migration was successfully finished we can remove the @@ -98,11 +106,11 @@ def upgrade(): if dialect == 'sqlite': # Unfortunately removing columns in SQLite is not supported until # version 3.35.0 (only Ubuntu 22.04's default version). Using the - # 'batch_alter_table' function can be used to remove a column here (it - # will create a new database) but it will clear the table which have - # foreign keys with cascade delete property. 
Unfortunately disabling
-            # the pragma foreign key doesn't work here. For this reason we will
-            # keep these columns in case of SQLite.
+            # 'batch_alter_table' function can be used to remove a column here
+            # (it will create a new database) but it will clear the tables
+            # which have foreign keys with the cascade delete property.
+            # Unfortunately, disabling the pragma foreign key doesn't work
+            # here. For this reason, we will keep these columns in case of
+            # SQLite.
 
         # with op.batch_alter_table('run_histories') as batch_op:
         #     batch_op.drop_column('check_command')
@@ -110,8 +118,8 @@
         # with op.batch_alter_table(
         #     'runs',
         #     reflect_args=[
-        #         # By default it we don't override the definition of this column
-        #         # we will get the following exception:
+        #         # By default, if we don't override the definition of this
+        #         # column, we will get the following exception:
         #         # (sqlite3.OperationalError) default value of column
         #         # [can_delete] is not constant
         #         sa.Column(
diff --git a/web/server/codechecker_server/migrations/report/versions/dd9c97ead24_share_the_locking_of_runs.py b/web/server/codechecker_server/migrations/report/versions/dd9c97ead24_share_the_locking_of_runs.py
index c8eba2f1f4..6affa3abab 100644
--- a/web/server/codechecker_server/migrations/report/versions/dd9c97ead24_share_the_locking_of_runs.py
+++ b/web/server/codechecker_server/migrations/report/versions/dd9c97ead24_share_the_locking_of_runs.py
@@ -1,26 +1,25 @@
-"""Share the locking of runs across servers via database
+"""
+Share the locking of runs across servers via database
 
 Revision ID: dd9c97ead24
-Revises: 4b38fa14c27b
+Revises: 4b38fa14c27b
 Create Date: 2017-11-17 15:44:07.810579
-
 """
+from alembic import op
+import sqlalchemy as sa
 
-
-# revision identifiers, used by Alembic.
+# Revision identifiers, used by Alembic.
 revision = 'dd9c97ead24'
 down_revision = '4b38fa14c27b'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
-
 
 def upgrade():
-    op.create_table('run_locks',
+    op.create_table(
+        'run_locks',
         sa.Column('name', sa.String(), nullable=False),
         sa.Column('locked_at', sa.DateTime(), nullable=False),
         sa.Column('username', sa.String(), nullable=True),
diff --git a/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py b/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py
index a94731c48a..8eab7c2c90 100644
--- a/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py
+++ b/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py
@@ -1,31 +1,35 @@
-"""Add bug path length
+"""
+Add bug path length
 
 Revision ID: e89887e7d3f0
-Revises: 3793e361a752
+Revises: 3793e361a752
 Create Date: 2018-08-28 11:11:07.533906
-
 """
-# revision identifiers, used by Alembic.
+from alembic import op
+import sqlalchemy as sa
+
+
+# Revision identifiers, used by Alembic.
revision = 'e89887e7d3f0'
 down_revision = '3793e361a752'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
 
 def upgrade():
     op.add_column('reports',
-                  sa.Column('path_length', sa.Integer(), nullable=True, default=0))
+                  sa.Column('path_length', sa.Integer(),
+                            nullable=True, default=0))
 
     conn = op.get_bind()
     conn.execute("""
         UPDATE reports
         SET path_length =
-            (SELECT COUNT(bug_path_events.report_id) from bug_path_events where bug_path_events.report_id = reports.id)
+            (SELECT COUNT(bug_path_events.report_id)
+             FROM bug_path_events
+             WHERE bug_path_events.report_id = reports.id)
     """)
diff --git a/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py b/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py
index 2a0b5163fd..78138e3d0d 100644
--- a/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py
+++ b/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py
@@ -1,19 +1,20 @@
-"""Fix setting analysis_info_id_seq
+"""
+Fix setting analysis_info_id_seq
 
 Revision ID: f8291ab1d6be
-Revises: a24461972d2e
+Revises: a24461972d2e
 Create Date: 2021-07-15 16:49:05.354455
-
 """
-# revision identifiers, used by Alembic.
+from alembic import op
+
+
+# Revision identifiers, used by Alembic.
 revision = 'f8291ab1d6be'
 down_revision = 'a24461972d2e'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-
 
 def upgrade():
     ctx = op.get_context()
@@ -26,3 +27,8 @@ def upgrade():
             (SELECT MAX(id) + 1 FROM analysis_info)
         )
     """)
+
+
+def downgrade():
+    # Allow downgrading this revision; it requires no actions.
+    pass
diff --git a/web/server/codechecker_server/migrations/report/versions/fb356f0eefed_cleanup_plan.py b/web/server/codechecker_server/migrations/report/versions/fb356f0eefed_cleanup_plan.py
index e54834c55e..aa3f84eb4d 100644
--- a/web/server/codechecker_server/migrations/report/versions/fb356f0eefed_cleanup_plan.py
+++ b/web/server/codechecker_server/migrations/report/versions/fb356f0eefed_cleanup_plan.py
@@ -1,23 +1,25 @@
-"""Cleanup plan
+"""
+Cleanup plan
 
 Revision ID: fb356f0eefed
-Revises: ad2a567e513a
+Revises: ad2a567e513a
 Create Date: 2021-09-06 10:55:43.093729
-
 """
-# revision identifiers, used by Alembic.
+from alembic import op
+import sqlalchemy as sa
+
+
+# Revision identifiers, used by Alembic.
revision = 'fb356f0eefed' down_revision = 'ad2a567e513a' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): - op.create_table('cleanup_plans', + op.create_table( + 'cleanup_plans', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('due_date', sa.DateTime(), nullable=True), @@ -27,13 +29,15 @@ def upgrade(): sa.UniqueConstraint('name', name=op.f('uq_cleanup_plans_name')) ) - op.create_table('cleanup_plan_report_hashes', + op.create_table( + 'cleanup_plan_report_hashes', sa.Column('cleanup_plan_id', sa.Integer(), nullable=True), sa.Column('bug_hash', sa.String(), nullable=False), sa.ForeignKeyConstraint( ['cleanup_plan_id'], ['cleanup_plans.id'], - name=op.f('fk_cleanup_plan_report_hashes_cleanup_plan_id_cleanup_plans'), + name=op.f('fk_cleanup_plan_report_hashes_cleanup_plan_id_' + 'cleanup_plans'), ondelete='CASCADE', initially='DEFERRED', deferrable=True), diff --git a/web/server/codechecker_server/routing.py b/web/server/codechecker_server/routing.py index 8f81ec2369..c993a0915f 100644 --- a/web/server/codechecker_server/routing.py +++ b/web/server/codechecker_server/routing.py @@ -41,6 +41,12 @@ def is_valid_product_endpoint(uripart): Returns whether or not the given URI part is to be considered a valid product name. """ + # FIXME: Endpoint "all" should be disallowed, as commit + # fd59927013d5482ff10e80994511971770753d0c in Dec 2017 added the ability + # for "CodeChecker server" to specify "--db-status all" and + # "--db-upgrade-schema all" for the case where *every* product needs to + # be checked/upgraded, essentially blocking the ability to status-check + # or schema migrate the product at the endpoint literal "all". # There are some forbidden keywords. if uripart in NON_PRODUCT_ENDPOINTS: diff --git a/web/server/codechecker_server/server.py b/web/server/codechecker_server/server.py index 618c0cf8a7..e638d07d1f 100644 --- a/web/server/codechecker_server/server.py +++ b/web/server/codechecker_server/server.py @@ -13,7 +13,10 @@ import atexit import datetime +from functools import partial from hashlib import sha256 +from http.server import HTTPServer, BaseHTTPRequestHandler, \ + SimpleHTTPRequestHandler import os import posixpath from random import sample @@ -23,12 +26,10 @@ import ssl import sys import stat +from typing import List, Optional, Tuple import urllib -from http.server import HTTPServer, BaseHTTPRequestHandler, \ - SimpleHTTPRequestHandler import multiprocess - from sqlalchemy.orm import sessionmaker from sqlalchemy.sql.expression import func from thrift.protocol import TJSONProtocol @@ -48,17 +49,15 @@ from codechecker_api.ServerInfo_v6 import \ serverInfoService as ServerInfoAPI_v6 +from codechecker_common import util from codechecker_common.logger import get_logger +from codechecker_common.compatibility.multiprocessing import \ + Pool, cpu_count +from codechecker_web.shared import database_status from codechecker_web.shared.version import get_version_str -from . import instance_manager -from . import permissions -from . import routing -from . import session_manager - -from .tmp import get_tmp_dir_hash - +from . 
import instance_manager, permissions, routing, session_manager
 from .api.authentication import ThriftAuthHandler as AuthHandler_v6
 from .api.config_handler import ThriftConfigHandler as ConfigHandler_v6
 from .api.product_server import ThriftProductHandler as ProductHandler_v6
@@ -70,6 +69,8 @@
     Configuration as ORMConfiguration
 from .database.database import DBSession
 from .database.run_db_model import IDENTIFIER as RUN_META, Run, RunLock
+from .tmp import get_tmp_dir_hash
+
 
 LOG = get_logger('server')
@@ -511,14 +512,15 @@ class Product:
     # connect() call so the next could be made.
     CONNECT_RETRY_TIMEOUT = 300
 
-    def __init__(self, orm_object, context, check_env):
+    def __init__(self, id_: int, endpoint: str, display_name: str,
+                 connection_string: str, context, check_env):
         """
         Set up a new managed product object for the configuration given.
         """
-        self.__id = orm_object.id
-        self.__endpoint = orm_object.endpoint
-        self.__connection_string = orm_object.connection
-        self.__display_name = orm_object.display_name
+        self.__id = id_
+        self.__endpoint = endpoint
+        self.__display_name = display_name
+        self.__connection_string = connection_string
         self.__driver_name = None
         self.__context = context
         self.__check_env = check_env
@@ -585,11 +587,11 @@ def connect(self, init_db=False):
 
         Each time the connect is called the db_status is updated.
         """
-        LOG.debug("Checking '%s' database.", self.endpoint)
 
         sql_server = database.SQLServer.from_connection_string(
             self.__connection_string,
+            self.__endpoint,
             RUN_META,
             self.__context.run_migration_root,
             interactive=False,
@@ -666,18 +668,85 @@ def cleanup_run_db(self):
         """
         Cleanup the run database which belongs to this product.
         """
-        LOG.info("Garbage collection for product '%s' started...",
-                 self.endpoint)
+        LOG.info("[%s] Garbage collection started...", self.endpoint)
 
-        db_cleanup.remove_expired_data(self.session_factory)
-        db_cleanup.remove_unused_data(self.session_factory)
-        db_cleanup.update_contextual_data(self.session_factory,
-                                          self.__context)
+        db_cleanup.remove_expired_data(self)
+        db_cleanup.remove_unused_data(self)
+        db_cleanup.update_contextual_data(self, self.__context)
 
-        LOG.info("Garbage collection finished.")
+        LOG.info("[%s] Garbage collection finished.", self.endpoint)
         return True
 
 
+def _do_db_cleanup(context, check_env,
+                   id_: int, endpoint: str, display_name: str,
+                   connection_str: str) -> Tuple[Optional[bool], str]:
+    # This function is a concurrent job handler!
+    try:
+        prod = Product(id_, endpoint, display_name, connection_str,
+                       context, check_env)
+        prod.connect(init_db=False)
+        if prod.db_status != DBStatus.OK:
+            status_str = database_status.db_status_msg.get(prod.db_status)
+            return None, \
+                f"Cleanup not attempted, database status is \"{status_str}\""
+
+        prod.cleanup_run_db()
+        prod.teardown()
+
+        # Result is hard-wired to True, because the db_cleanup routines
+        # swallow and log the potential errors but do not return them.
+        return True, ""
+    except Exception as e:
+        import traceback
+        traceback.print_exc()
+        return False, str(e)
+
+
+def _do_db_cleanups(config_database, context, check_env) \
+        -> Tuple[bool, List[Tuple[str, str]]]:
+    """
+    Performs on-demand start-up database cleanup on all the products present
+    in the ``config_database``.
+
+    Returns whether database clean-up succeeded for all products, and the
+    list of products for which it failed, along with the failure reason.
+ """ + def _get_products() -> List[Product]: + products = list() + cfg_engine = config_database.create_engine() + cfg_session_factory = sessionmaker(bind=cfg_engine) + with DBSession(cfg_session_factory) as cfg_db: + for row in cfg_db.query(ORMProduct) \ + .order_by(ORMProduct.endpoint.asc()) \ + .all(): + products.append((row.id, row.endpoint, row.display_name, + row.connection)) + cfg_engine.dispose() + return products + + products = _get_products() + if not products: + return True, list() + + thr_count = util.clamp(1, len(products), cpu_count()) + overall_result, failures = True, list() + with Pool(max_workers=thr_count) as executor: + LOG.info("Performing database cleanup using %d concurrent jobs...", + thr_count) + for product, result in \ + zip(products, executor.map( + partial(_do_db_cleanup, context, check_env), + *zip(*products))): + success, reason = result + if not success: + _, endpoint, _, _ = product + overall_result = False + failures.append((endpoint, reason)) + + return overall_result, failures + + class CCSimpleHttpServer(HTTPServer): """ Simple http server to handle requests from the clients. @@ -691,7 +760,6 @@ def __init__(self, RequestHandlerClass, config_directory, product_db_sql_server, - skip_db_cleanup, pckg_data, context, check_env, @@ -729,11 +797,6 @@ def __init__(self, cfg_sess.commit() cfg_sess.close() - if not skip_db_cleanup: - for endpoint, product in self.__products.items(): - if not product.cleanup_run_db(): - LOG.warning("Cleaning database for %s Failed.", endpoint) - try: HTTPServer.__init__(self, server_address, RequestHandlerClass, @@ -833,7 +896,10 @@ def add_product(self, orm_product, init_db=False): LOG.debug("Setting up product '%s'", orm_product.endpoint) - prod = Product(orm_product, + prod = Product(orm_product.id, + orm_product.endpoint, + orm_product.display_name, + orm_product.connection, self.context, self.check_env) @@ -983,7 +1049,7 @@ def __make_root_file(root_file): def start_server(config_directory, package_data, port, config_sql_server, - listen_address, force_auth, skip_db_cleanup, + listen_address, force_auth, skip_db_cleanup: bool, context, check_env): """ Start http server to handle web client and thrift requests. @@ -1043,6 +1109,20 @@ def start_server(config_directory, package_data, port, config_sql_server, LOG.error("The server's configuration file is invalid!") sys.exit(1) + if not skip_db_cleanup: + all_success, fails = _do_db_cleanups(config_sql_server, + context, + check_env) + if not all_success: + LOG.error("Failed to perform automatic cleanup on %d products! " + "Earlier logs might contain additional detailed " + "reasoning.\n\t* %s", len(fails), + "\n\t* ".join( + ("'%s' (%s)" % (ep, reason) for (ep, reason) in fails) + )) + else: + LOG.debug("Skipping db_cleanup, as requested.") + server_clazz = CCSimpleHttpServer if ':' in server_addr[0]: # IPv6 address specified for listening. 
@@ -1054,7 +1134,6 @@ def start_server(config_directory, package_data, port, config_sql_server,
                               RequestHandler,
                               config_directory,
                               config_sql_server,
-                              skip_db_cleanup,
                               package_data,
                               context,
                               check_env,
diff --git a/web/server/codechecker_server/session_manager.py b/web/server/codechecker_server/session_manager.py
index e553e69d37..5a3412e837 100644
--- a/web/server/codechecker_server/session_manager.py
+++ b/web/server/codechecker_server/session_manager.py
@@ -18,6 +18,7 @@
 from datetime import datetime
 from typing import Optional
 
+from codechecker_common.compatibility.multiprocessing import cpu_count
 from codechecker_common.logger import get_logger
 from codechecker_common.util import load_json
 
@@ -60,7 +61,7 @@ def get_worker_processes(scfg_dict):
     Return 'worker_processes' field from the config dictionary or returns
     the default value if this field is not set or the value is negative.
     """
-    default = os.cpu_count()
+    default = cpu_count()
     worker_processes = scfg_dict.get('worker_processes', default)
 
     if worker_processes < 0:
diff --git a/web/tests/Makefile b/web/tests/Makefile
index 5549a6bc63..33f0712a09 100644
--- a/web/tests/Makefile
+++ b/web/tests/Makefile
@@ -28,11 +28,11 @@ test: pycodestyle pylint test_unit test_functional
 test_in_env: pycodestyle_in_env pylint_in_env test_unit_in_env test_functional_novenv
 
 PYCODESTYLE_TEST_CMD = pycodestyle \
-  --exclude=server/codechecker_server/migrations \
-  codechecker_web tests \
+  codechecker_web \
+  tests \
   client/codechecker_client \
-  server/codechecker_server server/tests \
-  tests
+  server/codechecker_server \
+  server/tests
 
 pycodestyle:
 	$(PYCODESTYLE_TEST_CMD)
diff --git a/web/tests/libtest/codechecker.py b/web/tests/libtest/codechecker.py
index 3cf31375ab..d952e32ea0 100644
--- a/web/tests/libtest/codechecker.py
+++ b/web/tests/libtest/codechecker.py
@@ -8,8 +8,7 @@
 """
 Helper commands to run CodeChecker in the tests easier.
 """
-
-
+from datetime import timedelta
 import json
 import os
 import shlex
@@ -650,6 +649,7 @@ def start_or_get_server(auth_required=False):
 def wait_for_server_start(stdoutfile):
     print("Waiting for server start reading file " + stdoutfile)
     n = 0
+    server_start_timeout = timedelta(minutes=5)
     while True:
         if os.path.isfile(stdoutfile):
             with open(stdoutfile, encoding="utf-8", errors="ignore") as f:
@@ -662,6 +662,20 @@ def wait_for_server_start(stdoutfile):
             if "usage: CodeChecker" in out:
                 return
 
+        if n > server_start_timeout.total_seconds():
+            print("[FATAL!] Server failed to start after '%s' (%d seconds). "
+                  "There is likely a major issue preventing startup!"
+                  % (str(server_start_timeout),
+                     server_start_timeout.total_seconds()))
+            if os.path.isfile(stdoutfile):
+                with open(stdoutfile, encoding="utf-8",
+                          errors="ignore") as f:
+                    print("*** HERE FOLLOWS THE OUTPUT OF THE 'server' "
+                          "COMMAND! ***")
+                    print(f.read())
+                    print("*** END 'server' OUTPUT ***")
+
+            raise TimeoutError("Server failed to start in a timely manner")
+
         time.sleep(1)
         n += 1
         print("Waiting for server to start for " + str(n) + " seconds...")
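
The timeout added to `wait_for_server_start` above is an instance of a generic bounded-polling loop. A minimal sketch of the same pattern, assuming nothing beyond the standard library (the helper name and its arguments are illustrative, not part of the patch):

    import time
    from datetime import timedelta


    def wait_until(predicate, timeout=timedelta(minutes=5), poll_seconds=1):
        """Poll predicate() until it is truthy or the deadline passes.

        Mirrors the shape of the patched wait_for_server_start(): a fixed
        sleep between probes and a TimeoutError once the time budget is
        exhausted.
        """
        deadline = time.monotonic() + timeout.total_seconds()
        while time.monotonic() < deadline:
            if predicate():
                return
            time.sleep(poll_seconds)
        raise TimeoutError("condition did not hold within %s" % timeout)


    # Usage sketch:
    #   wait_until(lambda: os.path.isfile("server_stdout.txt"))

Using `time.monotonic()` for the deadline keeps the wait immune to wall-clock adjustments; the patched helper's per-second counter achieves the same budget with the simpler counting approach it already had.
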