diff --git a/.isort.cfg b/.isort.cfg index fda143287..99ee3daa9 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -1,5 +1,5 @@ [settings] -known_third_party = attr,dill,faker,git,jinja2,mock,parse,pathos,pkg_resources,plumbum,psutil,pygit2,pygtrie,pyparsing,pytest,pytest_git,result,rich,schema,setuptools,six,sqlalchemy,yaml +known_third_party = attr,dill,faker,git,jinja2,mock,parse,plumbum,psutil,pygtrie,pyparsing,pytest,pytest_git,result,rich,schema,setuptools,six,sqlalchemy,yaml multi_line_output=3 use_parentheses = True include_trailing_comma: True diff --git a/.pylintrc b/.pylintrc index ca0a04832..5bf0efc70 100644 --- a/.pylintrc +++ b/.pylintrc @@ -263,4 +263,4 @@ int-import-graph= [EXCEPTIONS] -overgeneral-exceptions=Exception +overgeneral-exceptions=builtins.Exception diff --git a/benchbuild/__init__.py b/benchbuild/__init__.py index 0544e305d..9ff2ae338 100644 --- a/benchbuild/__init__.py +++ b/benchbuild/__init__.py @@ -28,8 +28,8 @@ def __init__() -> None: """Initialize all plugins and settings.""" - __PLUGINS__.discover() - __SETTINGS__.setup_config(CFG) + if __PLUGINS__.discover(): + __SETTINGS__.setup_config(CFG) __init__() diff --git a/benchbuild/environments/adapters/podman.py b/benchbuild/environments/adapters/podman.py index 84dbb3554..9448ebb8d 100644 --- a/benchbuild/environments/adapters/podman.py +++ b/benchbuild/environments/adapters/podman.py @@ -5,7 +5,6 @@ from plumbum import local, ProcessExecutionError from result import Result, Err, Ok from rich import print -from rich.markdown import Markdown from benchbuild.environments.adapters import buildah from benchbuild.environments.adapters.common import ( @@ -173,6 +172,9 @@ def _create( '--mount', f'type=bind,src={source},target={target}'] if interactive: + # pylint: disable=import-outside-toplevel + from rich.markdown import Markdown + entrypoint = buildah.find_entrypoint(image.name) print( Markdown( diff --git a/benchbuild/environments/service_layer/debug.py b/benchbuild/environments/service_layer/debug.py index d6bea6265..d08d0b070 100644 --- a/benchbuild/environments/service_layer/debug.py +++ b/benchbuild/environments/service_layer/debug.py @@ -1,6 +1,5 @@ from plumbum import ProcessExecutionError from rich import print -from rich.markdown import Markdown from benchbuild.environments.adapters.common import bb_buildah from benchbuild.environments.domain import events @@ -25,6 +24,8 @@ def debug_image_kept( """ Spawn a debug session of the kept image and provide diagnostics. 
""" + # pylint: disable=import-outside-toplevel + from rich.markdown import Markdown with uow: container = uow.create(event.image_name, event.failed_image_name) if container is None: diff --git a/benchbuild/extensions/compiler.py b/benchbuild/extensions/compiler.py index ad8d7aadc..5d1498c0a 100644 --- a/benchbuild/extensions/compiler.py +++ b/benchbuild/extensions/compiler.py @@ -6,11 +6,12 @@ from plumbum.commands.base import BoundCommand from benchbuild.extensions import base +from benchbuild.settings import CFG from benchbuild.utils import db, run if TYPE_CHECKING: - from benchbuild.project import Project from benchbuild.experiment import Experiment + from benchbuild.project import Project LOG = logging.getLogger(__name__) @@ -65,9 +66,10 @@ def __call__( default_flow_style=False ) ) - db.persist_config( - run_info.db_run, run_info.session, self.config - ) + if CFG["db"]["enabled"]: + db.persist_config( + run_info.db_run, run_info.session, self.config + ) if run_info.has_failed: with run.track_execution( diff --git a/benchbuild/extensions/run.py b/benchbuild/extensions/run.py index ff8ddd963..fbec1ba40 100644 --- a/benchbuild/extensions/run.py +++ b/benchbuild/extensions/run.py @@ -5,6 +5,7 @@ from plumbum import local from benchbuild.extensions import base +from benchbuild.settings import CFG from benchbuild.utils import db, run from benchbuild.utils.settings import get_number_of_jobs @@ -45,9 +46,10 @@ def __call__(self, binary_command, *args, **kwargs): ) self.config['baseline'] = \ os.getenv("BB_IS_BASELINE", "False") - db.persist_config( - run_info.db_run, run_info.session, self.config - ) + if CFG["db"]["enabled"]: + db.persist_config( + run_info.db_run, run_info.session, self.config + ) res = self.call_next(binary_command, *args, **kwargs) res.append(run_info) return res @@ -69,6 +71,7 @@ def __init__(self, *extensions, limit="10m", **kwargs): self.limit = limit def __call__(self, binary_command, *args, **kwargs): + # pylint: disable=import-outside-toplevel from benchbuild.utils.cmd import timeout return self.call_next( timeout[self.limit, binary_command], *args, **kwargs @@ -83,8 +86,6 @@ class SetThreadLimit(base.Extension): """ def __call__(self, binary_command, *args, **kwargs): - from benchbuild.settings import CFG - config = self.config if config is not None and 'jobs' in config.keys(): jobs = get_number_of_jobs(config) diff --git a/benchbuild/extensions/time.py b/benchbuild/extensions/time.py index 8976adbdd..defcddb6c 100644 --- a/benchbuild/extensions/time.py +++ b/benchbuild/extensions/time.py @@ -4,6 +4,7 @@ import parse from benchbuild.extensions import base +from benchbuild.settings import CFG from benchbuild.utils import db from benchbuild.utils.cmd import time @@ -20,6 +21,10 @@ def __call__(self, binary_command, *args, may_wrap=True, **kwargs): def handle_timing(run_infos): """Takes care of the formating for the timing statistics.""" + if not CFG["db"]["enabled"]: + return run_infos + + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s session = s.Session() diff --git a/benchbuild/plugins.py b/benchbuild/plugins.py index 5070399f7..386ec9f20 100644 --- a/benchbuild/plugins.py +++ b/benchbuild/plugins.py @@ -23,15 +23,19 @@ LOG = logging.getLogger(__name__) -def discover() -> None: +def discover() -> bool: """Import all plugins listed in our configuration.""" + something_imported = False if CFG["plugins"]["autoload"]: experiment_plugins = CFG["plugins"]["experiments"].value project_plugins = CFG["plugins"]["projects"].value for 
plugin in itertools.chain(experiment_plugins, project_plugins): + something_imported = True try: importlib.import_module(plugin) except ImportError as import_error: LOG.error("Could not find '%s'", import_error.name) LOG.debug("ImportError: %s", import_error) + + return something_imported diff --git a/benchbuild/project.py b/benchbuild/project.py index 2b9db054c..30854fa84 100644 --- a/benchbuild/project.py +++ b/benchbuild/project.py @@ -336,7 +336,8 @@ def __default_primary_source(self) -> str: # pylint: disable=unused-private-mem runtime_extension = attr.ib(default=None) def __attrs_post_init__(self) -> None: - db.persist_project(self) + if CFG["db"]["enabled"]: + db.persist_project(self) # select container image if isinstance(type(self).CONTAINER, ContainerImage): @@ -345,7 +346,7 @@ def __attrs_post_init__(self) -> None: ) else: primary_source = primary(*self.SOURCE) - if isinstance(primary_source,source.BaseVersionFilter): + if isinstance(primary_source, source.BaseVersionFilter): primary_source = primary_source.child if not isinstance(primary_source, Git): raise AssertionError( @@ -361,7 +362,6 @@ def __attrs_post_init__(self) -> None: self.container = copy.deepcopy(image) break - def clean(self) -> None: """Clean the project build directory.""" builddir_p = local.path(self.builddir) diff --git a/benchbuild/res/wrapping/run_compiler.py.inc b/benchbuild/res/wrapping/run_compiler.py.inc index 2cf9c9610..6b45852e5 100644 --- a/benchbuild/res/wrapping/run_compiler.py.inc +++ b/benchbuild/res/wrapping/run_compiler.py.inc @@ -6,6 +6,12 @@ os.environ["OPENBLAS_NUM_THREADS"] = "4" from plumbum import TEE, local +# Performance optimization for benchbuild: don't import any experiments or +# projects. Everything necessary should be imported when loading (unpickling) +# the project and the compiler. +os.environ["BB_PLUGINS_AUTOLOAD"] = "False" + +from benchbuild.settings import CFG from benchbuild.utils import log from benchbuild.utils.db import persist_project from benchbuild.utils.run import exit_code_from_run_infos @@ -24,7 +30,8 @@ def update_project(argv): name = project_p.basename break PROJECT.name = name - persist_project(PROJECT) + if CFG["db"]["enabled"]: + persist_project(PROJECT) def main(argv): diff --git a/benchbuild/res/wrapping/run_dynamic.py.inc b/benchbuild/res/wrapping/run_dynamic.py.inc index 40c058f1b..30939ff4b 100644 --- a/benchbuild/res/wrapping/run_dynamic.py.inc +++ b/benchbuild/res/wrapping/run_dynamic.py.inc @@ -6,6 +6,11 @@ import sys from plumbum import TEE, local +# Performance optimization for benchbuild: don't import any experiments or +# projects. Everything necessary should be imported when loading (unpickling) +# the project. +os.environ["BB_PLUGINS_AUTOLOAD"] = "False" + from benchbuild.utils import log from benchbuild.utils.db import persist_project from benchbuild.utils.run import exit_code_from_run_infos diff --git a/benchbuild/res/wrapping/run_static.py.inc b/benchbuild/res/wrapping/run_static.py.inc index f64aac0f2..ba8cc105a 100644 --- a/benchbuild/res/wrapping/run_static.py.inc +++ b/benchbuild/res/wrapping/run_static.py.inc @@ -1,9 +1,15 @@ #!{{ python|default("/usr/bin/env python3") }} # +import os import sys from plumbum import TEE, local +# Performance optimization for benchbuild: don't import any experiments or +# projects. Everything necessary should be imported when loading (unpickling) +# the project. 
+os.environ["BB_PLUGINS_AUTOLOAD"] = "False" + from benchbuild.utils import log from benchbuild.utils.run import exit_code_from_run_infos from benchbuild.utils.wrapping import load diff --git a/benchbuild/settings.py b/benchbuild/settings.py index e4d71c367..9024aab36 100644 --- a/benchbuild/settings.py +++ b/benchbuild/settings.py @@ -145,9 +145,13 @@ } CFG['db'] = { + "enabled": { + "desc": "Whether the database is enabled.", + "default": False + }, "connect_string": { "desc": "sqlalchemy connect string", - "default": "sqlite://" + "default": "" }, "rollback": { "desc": "Rollback all operations after benchbuild completes.", @@ -381,7 +385,7 @@ "storage_driver": { "default": "vfs", "desc": "Storage driver for containers." - }, + }, "input": { "default": "container.tar.bz2", "desc": "Input container file/folder." diff --git a/benchbuild/utils/actions.py b/benchbuild/utils/actions.py index 1ccf7c7bc..bd0fe82ad 100644 --- a/benchbuild/utils/actions.py +++ b/benchbuild/utils/actions.py @@ -25,8 +25,6 @@ import typing as tp from datetime import datetime -import pathos.multiprocessing as mp -import sqlalchemy as sa from plumbum import ProcessExecutionError from benchbuild import command, signals, source @@ -250,7 +248,7 @@ def clean_mountpoints(root: str) -> None: root: All UnionFS-mountpoints under this directory will be unmounted. """ - import psutil + import psutil # pylint: disable=import-outside-toplevel umount_paths = [] real_root = os.path.realpath(root) @@ -341,22 +339,27 @@ def __init__( self.experiment = experiment def __call__(self) -> StepResult: - group, session = run.begin_run_group(self.project, self.experiment) - signals.handlers.register(run.fail_run_group, group, session) + if CFG["db"]["enabled"]: + group, session = run.begin_run_group(self.project, self.experiment) + signals.handlers.register(run.fail_run_group, group, session) try: self.project.run_tests() - run.end_run_group(group, session) + if CFG["db"]["enabled"]: + run.end_run_group(group, session) self.status = StepResult.OK except ProcessExecutionError: - run.fail_run_group(group, session) + if CFG["db"]["enabled"]: + run.fail_run_group(group, session) self.status = StepResult.ERROR raise except KeyboardInterrupt: - run.fail_run_group(group, session) + if CFG["db"]["enabled"]: + run.fail_run_group(group, session) self.status = StepResult.ERROR raise finally: - signals.handlers.deregister(run.fail_run_group) + if CFG["db"]["enabled"]: + signals.handlers.deregister(run.fail_run_group) return self.status @@ -444,6 +447,7 @@ def __init__( def begin_transaction( self, ) -> tp.Tuple["benchbuild.utils.schema.Experiment", tp.Any]: + import sqlalchemy as sa # pylint: disable=import-outside-toplevel experiment, session = db.persist_experiment(self.experiment) if experiment.begin is None: experiment.begin = datetime.now() @@ -467,6 +471,7 @@ def begin_transaction( def end_transaction( experiment: "benchbuild.utils.schema.Experiment", session: tp.Any ) -> None: + import sqlalchemy as sa # pylint: disable=import-outside-toplevel try: experiment.end = max(experiment.end, datetime.now()) session.add(experiment) @@ -475,6 +480,9 @@ def end_transaction( LOG.error(inv_req) def __run_children(self, num_processes: int) -> tp.List[StepResult]: + # pylint: disable=import-outside-toplevel + import pathos.multiprocessing as mp + results = [] actions = self.actions @@ -496,12 +504,14 @@ def __run_children(self, num_processes: int) -> tp.List[StepResult]: def __call__(self) -> StepResult: results = [] session = None - experiment, session = 
self.begin_transaction() + if CFG["db"]["enabled"]: + experiment, session = self.begin_transaction() try: results = self.__run_children(int(CFG["parallel_processes"])) finally: - self.end_transaction(experiment, session) - signals.handlers.deregister(self.end_transaction) + if CFG["db"]["enabled"]: + self.end_transaction(experiment, session) + signals.handlers.deregister(self.end_transaction) self.status = max(results) if results else StepResult.OK return self.status @@ -636,22 +646,27 @@ def __init__( ]) def __call__(self) -> StepResult: - group, session = run.begin_run_group(self.project, self.experiment) - signals.handlers.register(run.fail_run_group, group, session) + if CFG["db"]["enabled"]: + group, session = run.begin_run_group(self.project, self.experiment) + signals.handlers.register(run.fail_run_group, group, session) try: self.status = max([workload() for workload in self.actions], default=StepResult.OK) - run.end_run_group(group, session) + if CFG["db"]["enabled"]: + run.end_run_group(group, session) except ProcessExecutionError: - run.fail_run_group(group, session) + if CFG["db"]["enabled"]: + run.fail_run_group(group, session) self.status = StepResult.ERROR raise except KeyboardInterrupt: - run.fail_run_group(group, session) + if CFG["db"]["enabled"]: + run.fail_run_group(group, session) self.status = StepResult.ERROR raise finally: - signals.handlers.deregister(run.fail_run_group) + if CFG["db"]["enabled"]: + signals.handlers.deregister(run.fail_run_group) return self.status diff --git a/benchbuild/utils/db.py b/benchbuild/utils/db.py index d8e50e4a3..26adc4e54 100644 --- a/benchbuild/utils/db.py +++ b/benchbuild/utils/db.py @@ -1,8 +1,6 @@ """Database support module for the benchbuild study.""" import logging -from sqlalchemy.exc import IntegrityError - from benchbuild.settings import CFG LOG = logging.getLogger(__name__) @@ -39,6 +37,7 @@ def create_run(cmd, project, exp, grp): The inserted tuple representing the run and the session opened with the new run. Don't forget to commit it at some point. """ + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s session = s.Session() @@ -73,6 +72,7 @@ def create_run_group(prj, experiment): A tuple (group, session) containing both the newly created run_group and the transaction object. """ + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s session = s.Session() @@ -137,6 +137,9 @@ def persist_experiment(experiment): Args: experiment: The experiment we want to persist. """ + # pylint: disable=import-outside-toplevel + from sqlalchemy.exc import IntegrityError + from benchbuild.utils.schema import Experiment, Session session = Session() @@ -177,6 +180,7 @@ def persist_time(run, session, timings): session: The db transaction we belong to. timings: The timing measurements we want to store. """ + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s for timing in timings: @@ -203,6 +207,7 @@ def persist_perf(run, session, svg_path): session: The db transaction we belong to. svg_path: The path to the SVG file we want to store. """ + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s with open(svg_path, 'r') as svg_file: @@ -235,6 +240,7 @@ def persist_config(run, session, cfg): session: The db transaction we belong to. cfg: The configuration we want to persist. 
""" + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s for cfg_elem in cfg: diff --git a/benchbuild/utils/revision_ranges.py b/benchbuild/utils/revision_ranges.py index 519d2faba..085f91629 100644 --- a/benchbuild/utils/revision_ranges.py +++ b/benchbuild/utils/revision_ranges.py @@ -7,12 +7,14 @@ import typing as tp from enum import IntFlag -import pygit2 from plumbum.machines import LocalCommand from benchbuild.source import Git from benchbuild.utils.cmd import git as local_git +if tp.TYPE_CHECKING: + import pygit2 + def _get_git_for_path(repo_path: str) -> LocalCommand: """ @@ -158,9 +160,9 @@ class CommitState(IntFlag): def _find_blocked_commits( - commit: pygit2.Commit, good: tp.List[pygit2.Commit], - bad: tp.List[pygit2.Commit] -) -> tp.List[pygit2.Commit]: + commit: 'pygit2.Commit', good: tp.List['pygit2.Commit'], + bad: tp.List['pygit2.Commit'] +) -> tp.List['pygit2.Commit']: """ Find all commits affected by a bad commit and not yet "fixed" by a good commit. This is done by performing a backwards search starting @@ -175,8 +177,8 @@ def _find_blocked_commits( All transitive parents of commit that have an ancestor from bad that is not fixed by some commit from good. """ - stack: tp.List[pygit2.Commit] = [commit] - blocked: tp.Dict[pygit2.Commit, CommitState] = {} + stack: tp.List['pygit2.Commit'] = [commit] + blocked: tp.Dict['pygit2.Commit', CommitState] = {} while stack: current_commit = stack.pop() @@ -239,6 +241,7 @@ def __init__( self.__revision_list: tp.Optional[tp.List[str]] = None def init_cache(self, repo_path: str) -> None: + import pygit2 # pylint: disable=import-outside-toplevel self.__revision_list = [] repo = pygit2.Repository(repo_path) git = _get_git_for_path(repo_path) diff --git a/benchbuild/utils/run.py b/benchbuild/utils/run.py index 6bb96c56f..fe9f9735b 100644 --- a/benchbuild/utils/run.py +++ b/benchbuild/utils/run.py @@ -66,6 +66,10 @@ def __begin(self, command: BaseCommand, project, experiment, group): (run, session), where run is the generated run instance and session the associated transaction for later use. """ + if not CFG["db"]["enabled"]: + return + + # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s from benchbuild.utils.db import create_run @@ -95,6 +99,10 @@ def __end(self, stdout, stderr): stdout: The stdout we captured of the run. stderr: The stderr we capture of the run. """ + if not CFG["db"]["enabled"]: + return + + # pylint: disable=import-outside-toplevel from benchbuild.utils.schema import RunLog run_id = self.db_run.id @@ -124,6 +132,10 @@ def __fail(self, retcode, stdout, stderr): stdout: The stdout we captured of the run. stderr: The stderr we capture of the run. 
""" + if not CFG["db"]["enabled"]: + return + + # pylint: disable=import-outside-toplevel from benchbuild.utils.schema import RunLog run_id = self.db_run.id @@ -157,8 +169,9 @@ def __attrs_post_init__(self): ) signals.handlers.register(self.__fail, 15, "SIGTERM", "SIGTERM") - run_id = self.db_run.id - settings.CFG["db"]["run_id"] = run_id + if CFG["db"]["enabled"]: + run_id = self.db_run.id + settings.CFG["db"]["run_id"] = run_id def add_payload(self, name, payload): if self == payload: @@ -214,7 +227,8 @@ def __call__(self, *args, expected_retcode=0, ri=None, **kwargs): return self def commit(self): - self.session.commit() + if CFG["db"]["enabled"]: + self.session.commit() def begin_run_group(project, experiment): @@ -231,6 +245,7 @@ def begin_run_group(project, experiment): ``(group, session)`` where group is the created group in the database and session is the database session this group lives in. """ + # pylint: disable=import-outside-toplevel from benchbuild.utils.db import create_run_group group, session = create_run_group(project, experiment) @@ -345,6 +360,7 @@ def with_env_recursive(cmd: BaseCommand, **envvars: str) -> BaseCommand: Returns: The updated command. """ + # pylint: disable=import-outside-toplevel from plumbum.commands.base import BoundCommand, BoundEnvCommand if isinstance(cmd, BoundCommand): cmd.cmd = with_env_recursive(cmd.cmd, **envvars) diff --git a/benchbuild/utils/settings.py b/benchbuild/utils/settings.py index 8cf0f5411..3dd31e8a1 100644 --- a/benchbuild/utils/settings.py +++ b/benchbuild/utils/settings.py @@ -22,7 +22,6 @@ import schema import six import yaml -from pkg_resources import DistributionNotFound, get_distribution from plumbum import LocalPath, local import benchbuild.utils.user_interface as ui @@ -36,11 +35,22 @@ def __getitem__(self: 'Indexable', key: str) -> 'Indexable': pass -try: - __version__ = get_distribution("benchbuild").version -except DistributionNotFound: - __version__ = "unknown" - LOG.error("could not find version information.") +# Importing pkg_resources is slow. Starting with Python 3.8, there is a better +# option. 
+if sys.version_info >= (3, 8): + from importlib.metadata import version, PackageNotFoundError + try: + __version__ = version("benchbuild") + except PackageNotFoundError: + __version__ = "unknown" + LOG.error("could not find version information.") +else: + from pkg_resources import DistributionNotFound, get_distribution + try: + __version__ = get_distribution("benchbuild").version + except DistributionNotFound: + __version__ = "unknown" + LOG.error("could not find version information.") def available_cpu_count() -> int: @@ -142,7 +152,7 @@ def is_yaml(cfg_file: str) -> bool: return os.path.splitext(cfg_file)[1] in [".yml", ".yaml"] -class ConfigLoader(yaml.SafeLoader): +class ConfigLoader(yaml.CSafeLoader): # type: ignore """Avoid polluting yaml's namespace with our modifications.""" @@ -322,17 +332,16 @@ def init_from_env(self) -> None: if 'default' in self.node: env_var = self.__to_env_var__().upper() - if self.has_value(): - env_val = self.node['value'] - else: - env_val = self.node['default'] - env_val = os.getenv(env_var, to_yaml(env_val)) - try: - self.node['value'] = yaml.load( - str(env_val), Loader=ConfigLoader - ) - except ValueError: - self.node['value'] = env_val + if not self.has_value(): + self.node['value'] = self.node['default'] + env_val = os.getenv(env_var, None) + if env_val is not None: + try: + self.node['value'] = yaml.load( + str(env_val), Loader=ConfigLoader + ) + except ValueError: + self.node['value'] = env_val else: if isinstance(self.node, dict): for k in self.node: diff --git a/benchbuild/utils/wrapping.py b/benchbuild/utils/wrapping.py index 8b1a1bc15..9797c7eda 100644 --- a/benchbuild/utils/wrapping.py +++ b/benchbuild/utils/wrapping.py @@ -27,10 +27,8 @@ import sys import typing as tp from pathlib import Path -from typing import TYPE_CHECKING import dill -import jinja2 import plumbum as pb from plumbum import local from plumbum.commands.base import BoundCommand @@ -50,6 +48,8 @@ dill.settings['byref'] = True if tp.TYPE_CHECKING: + import jinja2 + import benchbuild.project.Project # pylint: disable=unused-import @@ -77,7 +77,8 @@ def strip_path_prefix(ipath: Path, prefix: Path) -> Path: return ipath -def __create_jinja_env() -> jinja2.Environment: +def __create_jinja_env() -> 'jinja2.Environment': + import jinja2 # pylint: disable=import-outside-toplevel return jinja2.Environment( trim_blocks=True, lstrip_blocks=True,
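
The recurring change in this patch is that database persistence becomes opt-in: CFG["db"]["enabled"] defaults to False, the default connect string is emptied, and every persist_*/run-group call site is wrapped in an enabled check. A minimal usage sketch for opting back in follows, assuming the BB_<section>_<option> environment mapping that the patch itself relies on for BB_PLUGINS_AUTOLOAD; the exact variable names below are an inference, not shown in this diff, and a config file entry would work just as well.

    import os

    # Assumed names, following the BB_<section>_<option> convention used for
    # BB_PLUGINS_AUTOLOAD above; adjust to your configuration mechanism.
    os.environ["BB_DB_ENABLED"] = "True"
    os.environ["BB_DB_CONNECT_STRING"] = "sqlite:///benchbuild.sqlite"

    from benchbuild.settings import CFG

    # Guarded call sites (db.persist_project, run-group bookkeeping, ...) only
    # execute when this flag is truthy; with it unset, no SQLAlchemy import
    # ever happens during a run.
    if CFG["db"]["enabled"]:
        print("database persistence active:", CFG["db"]["connect_string"].value)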
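
The other recurring change is deferring heavy third-party imports (sqlalchemy, pathos, pygit2, jinja2, rich.markdown, psutil) to the call sites that need them, while keeping annotations valid through typing.TYPE_CHECKING and quoted type names. A minimal, self-contained sketch of the pattern as applied in benchbuild/utils/revision_ranges.py; the function below is illustrative only, not part of the patch.

    import typing as tp

    if tp.TYPE_CHECKING:
        # Only the type checker sees this import; runtime startup stays cheap.
        import pygit2


    def open_repo(repo_path: str) -> 'pygit2.Repository':
        """Open a repository, paying the pygit2 import cost only when called."""
        import pygit2  # pylint: disable=import-outside-toplevel
        return pygit2.Repository(repo_path)

The quoted 'pygit2.Repository' annotation is what lets the module-level import disappear without breaking static checks, which is why the real annotations in the revision_ranges.py hunk are quoted as well.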
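
Finally, the wrapper templates gate plugin autoloading through the environment before benchbuild is imported, and benchbuild/__init__.py now skips setup_config() whenever discover() reports that nothing was loaded. A condensed, standalone illustration of that ordering, using only names that appear in the hunks above:

    import os

    # Must be set before the first `import benchbuild`, exactly as the
    # run_*.py.inc templates do: autoloading is decided at import time.
    os.environ["BB_PLUGINS_AUTOLOAD"] = "False"

    # With autoload disabled, discover() returns False and setup_config() is
    # skipped, so an unpickled project starts without importing any plugins.
    import benchbuild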