From e497b9c1ea9f5f7e6acb06065dbb16baa926e626 Mon Sep 17 00:00:00 2001
From: Aleksei Stepanov
Date: Sat, 25 Dec 2021 21:17:32 +0100
Subject: [PATCH] Fix #6256: migrate to Python 3.7+

* Update classifiers
* Update python_requires
* Fix deprecated code (some left over from Python 2.x):
  - `io.open` -> `open`
  - `IOError` -> `OSError` (in Python 3, `IOError` is an alias for `OSError`)
  - `socket.error` -> `OSError`
  - `u''` -> `''`
  - `str.format` -> f-strings (where the change was quick and automatically validated)
  - drop `object` as an explicit base class (not needed in Python 3)
  - use `set` comprehensions
  - `sum` over generators (avoids building temporary lists)
  - use plain `super()`
* Remove unused imports
---
 docs/source/conf.py | 2 +-
 notebook/__main__.py | 1 -
 notebook/_sysinfo.py | 6 +-
 notebook/auth/login.py | 14 +-
 notebook/auth/security.py | 7 +-
 notebook/auth/tests/test_security.py | 10 +-
 notebook/base/handlers.py | 106 ++++++-------
 notebook/base/zmqhandlers.py | 4 +-
 notebook/bundler/bundlerextensions.py | 62 ++++----
 notebook/bundler/tarball_bundler.py | 6 +-
 notebook/bundler/tests/test_bundler_api.py | 17 +--
 notebook/config_manager.py | 5 +-
 notebook/edit/handlers.py | 4 +-
 notebook/extensions.py | 6 +-
 notebook/gateway/handlers.py | 16 +-
 notebook/gateway/managers.py | 4 +-
 notebook/i18n/__init__.py | 5 +-
 notebook/jstest.py | 30 ++--
 notebook/kernelspecs/handlers.py | 4 +-
 notebook/nbconvert/handlers.py | 2 +-
 .../tests/test_nbconvert_handlers.py | 42 +++--
 notebook/nbextensions.py | 130 ++++++++--------
 notebook/notebook/handlers.py | 6 +-
 notebook/notebookapp.py | 52 +++----
 notebook/prometheus/log_functions.py | 2 +-
 notebook/serverextensions.py | 26 ++--
 notebook/services/config/handlers.py | 3 -
 .../services/config/tests/test_config_api.py | 2 +-
 notebook/services/contents/checkpoints.py | 6 +-
 notebook/services/contents/filecheckpoints.py | 14 +-
 notebook/services/contents/fileio.py | 19 ++-
 notebook/services/contents/filemanager.py | 53 +++---
 notebook/services/contents/handlers.py | 25 ++-
 .../services/contents/largefilemanager.py | 20 ++-
 notebook/services/contents/manager.py | 60 ++++----
 .../contents/tests/test_contents_api.py | 144 +++++++++---------
 .../services/contents/tests/test_fileio.py | 68 ++++-----
 .../contents/tests/test_largefilemanager.py | 4 +-
 .../services/contents/tests/test_manager.py | 38 ++---
 notebook/services/kernels/handlers.py | 4 +-
 notebook/services/kernels/kernelmanager.py | 4 +-
 .../kernels/tests/test_kernels_api.py | 12 +-
 notebook/services/kernelspecs/handlers.py | 2 +-
 .../kernelspecs/tests/test_kernelspecs_api.py | 29 ++--
 .../nbconvert/tests/test_nbconvert_api.py | 2 +-
 notebook/services/sessions/sessionmanager.py | 4 +-
 .../sessions/tests/test_sessionmanager.py | 44 +++---
 .../sessions/tests/test_sessions_api.py | 14 +-
 notebook/terminal/handlers.py | 4 +-
 notebook/terminal/terminalmanager.py | 4 +-
 notebook/terminal/tests/test_terminals_api.py | 2 +-
 notebook/tests/conftest.py | 4 +-
 notebook/tests/selenium/conftest.py | 4 +-
 notebook/tests/selenium/test_deletecell.py | 7 +-
 .../tests/selenium/test_multiselect_toggle.py | 3 +-
 .../tests/selenium/test_prompt_numbers.py | 1 -
 .../tests/selenium/test_save_as_notebook.py | 4 +-
 notebook/tests/selenium/utils.py | 66 ++++----
 notebook/tests/test_files.py | 40 +++--
 notebook/tests/test_gateway.py | 8 +-
 notebook/tests/test_nbextensions.py | 72 ++++-----
 notebook/tests/test_notebookapp.py | 4 +-
 notebook/tests/test_paths.py | 1 -
 notebook/tests/test_serverextensions.py | 10 +-
 notebook/tests/test_utils.py | 2 +-
 notebook/traittypes.py | 4 +-
notebook/tree/tests/test_tree_handler.py | 9 +- notebook/utils.py | 6 +- notebook/view/handlers.py | 2 +- setup.py | 8 +- setupbase.py | 50 +++--- tools/secure_notebook.py | 19 +-- 72 files changed, 709 insertions(+), 765 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 1aa7613b42..9482f01205 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -319,7 +319,7 @@ # -- Options for link checks ---------------------------------------------- linkcheck_ignore = [ - 'http://127\.0\.0\.1/*' + r'http://127\.0\.0\.1/*' ] diff --git a/notebook/__main__.py b/notebook/__main__.py index ee339caebf..c11733204c 100644 --- a/notebook/__main__.py +++ b/notebook/__main__.py @@ -1,4 +1,3 @@ - if __name__ == '__main__': from notebook import notebookapp as app app.launch_new_instance() diff --git a/notebook/_sysinfo.py b/notebook/_sysinfo.py index 951ffd1014..4abeadcedb 100644 --- a/notebook/_sysinfo.py +++ b/notebook/_sysinfo.py @@ -55,10 +55,10 @@ def pkg_commit_hash(pkg_path): if repo_commit: return 'repository', repo_commit.strip().decode('ascii') else: - return u'', u'' + return '', '' par_path = p.dirname(par_path) - - return u'', u'' + + return '', '' def pkg_info(pkg_path): diff --git a/notebook/auth/login.py b/notebook/auth/login.py index 1ac434dc5e..0b0979134d 100644 --- a/notebook/auth/login.py +++ b/notebook/auth/login.py @@ -73,13 +73,13 @@ def hashed_password(self): def passwd_check(self, a, b): return passwd_check(a, b) - + def post(self): - typed_password = self.get_argument('password', default=u'') - new_password = self.get_argument('new_password', default=u'') + typed_password = self.get_argument('password', default='') + new_password = self.get_argument('new_password', default='') + - if self.get_login_available(self.settings): if self.passwd_check(self.hashed_password, typed_password) and not new_password: self.set_login_cookie(self, uuid.uuid4().hex) @@ -112,7 +112,7 @@ def set_login_cookie(cls, handler, user_id=None): handler.set_secure_cookie(handler.cookie_name, user_id, **cookie_options) return user_id - auth_header_pat = re.compile('token\s+(.+)', re.IGNORECASE) + auth_header_pat = re.compile(r'token\s+(.+)', re.IGNORECASE) @classmethod def get_token(cls, handler): @@ -197,7 +197,7 @@ def get_user(cls, handler): @classmethod def get_user_token(cls, handler): """Identify the user based on a token in the URL or Authorization header - + Returns: - uuid if authenticated - None if not @@ -245,7 +245,7 @@ def password_from_settings(cls, settings): If there is no configured password, an empty string will be returned. """ - return settings.get('password', u'') + return settings.get('password', '') @classmethod def get_login_available(cls, settings): diff --git a/notebook/auth/security.py b/notebook/auth/security.py index cc9723901b..d10a9c2d22 100644 --- a/notebook/auth/security.py +++ b/notebook/auth/security.py @@ -5,7 +5,6 @@ from contextlib import contextmanager import getpass import hashlib -import io import json import os import random @@ -135,7 +134,7 @@ def passwd_check(hashed_passphrase, passphrase): def persist_config(config_file=None, mode=0o600): """Context manager that can be used to modify a config object - On exit of the context manager, the config will be written back to disk, + On exit of the context manager, the config will be written back to disk, by default with user-only (600) permissions. 
""" @@ -152,7 +151,7 @@ def persist_config(config_file=None, mode=0o600): yield config - with io.open(config_file, 'w', encoding='utf8') as f: + with open(config_file, 'w', encoding='utf8') as f: f.write(cast_unicode(json.dumps(config, indent=2))) try: @@ -165,7 +164,7 @@ def persist_config(config_file=None, mode=0o600): def set_password(password=None, config_file=None): """Ask user for password, store it in notebook json configuration file""" - + hashed_password = passwd(password) with persist_config(config_file) as config: diff --git a/notebook/auth/tests/test_security.py b/notebook/auth/tests/test_security.py index cf748cda82..b042fcbfa9 100644 --- a/notebook/auth/tests/test_security.py +++ b/notebook/auth/tests/test_security.py @@ -18,8 +18,8 @@ def test_bad(): def test_passwd_check_unicode(): # GH issue #4524 - phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' - assert passwd_check(phash, u"łe¶ŧ←↓→") - phash = (u'argon2:$argon2id$v=19$m=10240,t=10,p=8$' - u'qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg') - assert passwd_check(phash, u"łe¶ŧ←↓→") + phash = 'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' + assert passwd_check(phash, "łe¶ŧ←↓→") + phash = ('argon2:$argon2id$v=19$m=10240,t=10,p=8$' + 'qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg') + assert passwd_check(phash, "łe¶ŧ←↓→") diff --git a/notebook/base/handlers.py b/notebook/base/handlers.py index 216480291d..783261a5e9 100755 --- a/notebook/base/handlers.py +++ b/notebook/base/handlers.py @@ -59,7 +59,7 @@ class AuthenticatedHandler(web.RequestHandler): @property def content_security_policy(self): """The default Content-Security-Policy header - + Can be overridden by defining Content-Security-Policy in settings['headers'] """ if 'Content-Security-Policy' in self.settings.get('headers', {}): @@ -132,7 +132,7 @@ def get_current_user(self): def skip_check_origin(self): """Ask my login_handler if I should skip the origin_check - + For example: in the default LoginHandler, if a request is token-authenticated, origin checking should be skipped. """ @@ -156,7 +156,7 @@ def cookie_name(self): self.request.host )) return self.settings.get('cookie_name', default_cookie_name) - + @property def logged_in(self): """Is a user currently logged in?""" @@ -188,14 +188,14 @@ def login_available(self): class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling - + Mostly property shortcuts to IPython-specific settings. """ @property def ignore_minified_js(self): """Wether to user bundle in template. 
(*.min files) - + Mainly use for development and avoid file recompilation """ return self.settings.get('ignore_minified_js', False) @@ -203,7 +203,7 @@ def ignore_minified_js(self): @property def config(self): return self.settings.get('config', None) - + @property def log(self): """use the IPython log by default, falling back on tornado's logger""" @@ -213,23 +213,23 @@ def log(self): def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get('jinja_template_vars', {}) - + #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- - + @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get('version_hash', '') - + @property def mathjax_url(self): url = self.settings.get('mathjax_url', '') if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) - + @property def mathjax_config(self): return self.settings.get('mathjax_config', 'TeX-AMS-MML_HTMLorMML-full,Safe') @@ -251,11 +251,11 @@ def contents_js_source(self): self.log.debug("Using contents: %s", self.settings.get('contents_js_source', 'services/contents')) return self.settings.get('contents_js_source', 'services/contents') - + #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- - + @property def kernel_manager(self): return self.settings['kernel_manager'] @@ -263,15 +263,15 @@ def kernel_manager(self): @property def contents_manager(self): return self.settings['contents_manager'] - + @property def session_manager(self): return self.settings['session_manager'] - + @property def terminal_manager(self): return self.settings['terminal_manager'] - + @property def kernel_spec_manager(self): return self.settings['kernel_spec_manager'] @@ -283,22 +283,22 @@ def config_manager(self): #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- - + @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') - + @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) - + @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) - + def set_default_headers(self): """Add CORS headers, if defined""" super().set_default_headers() @@ -320,7 +320,7 @@ def set_default_headers(self): if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') - + def set_attachment_header(self, filename): """Set Content-Disposition: attachment header @@ -424,7 +424,7 @@ def check_referer(self): return True # apply cross-origin checks to Referer: - origin = "{}://{}".format(referer_url.scheme, referer_url.netloc) + origin = f"{referer_url.scheme}://{referer_url.netloc}" if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: @@ -453,7 +453,7 @@ def check_xsrf_cookie(self): if not self.check_referer(): referer = self.request.headers.get('Referer') if referer: - msg = "Blocking Cross Origin request from {}.".format(referer) + msg = f"Blocking Cross Origin request from {referer}." 
else: msg = "Blocking request from unknown origin" raise web.HTTPError(403, msg) from e @@ -505,16 +505,16 @@ def prepare(self): #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- - + def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) - + def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) - + @property def template_namespace(self): return dict( @@ -537,19 +537,19 @@ def template_namespace(self): self.request.headers.get('Accept-Language', ''))), **self.jinja_template_vars ) - + def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? - body = self.request.body.strip().decode(u'utf-8') + body = self.request.body.strip().decode('utf-8') try: model = json.loads(body) except Exception as e: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) - raise web.HTTPError(400, u'Invalid JSON in body of request') from e + raise web.HTTPError(400, 'Invalid JSON in body of request') from e return model def write_error(self, status_code, **kwargs): @@ -565,12 +565,12 @@ def write_error(self, status_code, **kwargs): message = exception.log_message % exception.args except Exception: pass - + # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason - + # build template namespace ns = dict( status_code=status_code, @@ -741,15 +741,15 @@ def set_headers(self): # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") - + def compute_etag(self): return None - + def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. - + Requires tornado 3.1 - + Adding to tornado's own handling, forbids the serving of hidden files. """ abs_path = super().validate_absolute_path(root, absolute_path) @@ -762,12 +762,12 @@ def validate_absolute_path(self, root, absolute_path): def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. - + This should be used on any JSON API on any handler method that can raise HTTPErrors. - + This will grab the latest HTTPError exception using sys.exc_info and then: - + 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. 
@@ -793,31 +793,31 @@ def wrapper(self, *args, **kwargs): class FileFindHandler(IPythonHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" - + # cache search results, don't search for files more than once _static_paths = {} - + def set_headers(self): super().set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments or \ any(self.request.path.startswith(path) for path in self.no_cache_paths): self.set_header("Cache-Control", "no-cache") - + def initialize(self, path, default_filename=None, no_cache_paths=None): self.no_cache_paths = no_cache_paths or [] - + if isinstance(path, string_types): path = [path] - + self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename - + def compute_etag(self): return None - + @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" @@ -826,25 +826,25 @@ def get_absolute_path(cls, roots, path): return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) - except IOError: + except OSError: # IOError means not found return '' - + cls._static_paths[path] = abspath - + log().debug("Path %s served from %s"%(path, abspath)) return abspath - + def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) - + for root in self.root: if (absolute_path + os.sep).startswith(root): break - + return super().validate_absolute_path(root, absolute_path) @@ -874,11 +874,11 @@ def get(self): class FilesRedirectHandler(IPythonHandler): """Handler for redirecting relative URLs to the /files/ handler""" - + @staticmethod def redirect_to_files(self, path): """make redirect logic a reusable static method - + so it can be called from other handlers. """ cm = self.contents_manager @@ -903,7 +903,7 @@ def redirect_to_files(self, path): url = url_path_join(self.base_url, 'files', url_escape(path)) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) - + def get(self, path=''): return self.redirect_to_files(self, path) diff --git a/notebook/base/zmqhandlers.py b/notebook/base/zmqhandlers.py index c4d4554a89..e360d69bf9 100644 --- a/notebook/base/zmqhandlers.py +++ b/notebook/base/zmqhandlers.py @@ -87,7 +87,7 @@ def deserialize_binary_message(bmsg): WS_PING_INTERVAL = 30000 -class WebSocketMixin(object): +class WebSocketMixin: """Mixin for common websocket options""" ping_callback = None last_ping = 0 @@ -167,7 +167,7 @@ def open(self, *args, **kwargs): self.send_ping, self.ping_interval, ) self.ping_callback.start() - return super(WebSocketMixin, self).open(*args, **kwargs) + return super().open(*args, **kwargs) def send_ping(self): """send a ping to keep the websocket alive""" diff --git a/notebook/bundler/bundlerextensions.py b/notebook/bundler/bundlerextensions.py index 2ac346f971..4df1f271e3 100644 --- a/notebook/bundler/bundlerextensions.py +++ b/notebook/bundler/bundlerextensions.py @@ -17,14 +17,14 @@ def _get_bundler_metadata(module): """Gets the list of bundlers associated with a Python package. 
- + Returns a tuple of (the module, [{ 'name': 'unique name of the bundler', 'label': 'file menu item label for the bundler', 'module_name': 'dotted package/module name containing the bundler', 'group': 'download or deploy parent menu item' }]) - + Parameters ---------- @@ -34,16 +34,16 @@ def _get_bundler_metadata(module): """ m = import_item(module) if not hasattr(m, '_jupyter_bundlerextension_paths'): - raise KeyError('The Python module {} does not contain a valid bundlerextension'.format(module)) + raise KeyError(f'The Python module {module} does not contain a valid bundlerextension') bundlers = m._jupyter_bundlerextension_paths() return m, bundlers def _set_bundler_state(name, label, module_name, group, state, user=True, sys_prefix=False, logger=None): """Set whether a bundler is enabled or disabled. - + Returns True if the final state is the one requested. - + Parameters ---------- name : string @@ -68,14 +68,14 @@ def _set_bundler_state(name, label, module_name, group, state, config_dir = os.path.join( _get_config_dir(user=user, sys_prefix=sys_prefix), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - + if logger: logger.info("{} {} bundler {}...".format( "Enabling" if state else "Disabling", name, module_name )) - + if state: cm.update(BUNDLER_SECTION, { BUNDLER_SUBSECTION: { @@ -96,13 +96,13 @@ def _set_bundler_state(name, label, module_name, group, state, return (cm.get(BUNDLER_SECTION) .get(BUNDLER_SUBSECTION, {}) .get(name) is not None) == state - + def _set_bundler_state_python(state, module, user, sys_prefix, logger=None): """Enables or disables bundlers defined in a Python package. - + Returns a list of whether the state was achieved for each bundler. - + Parameters ---------- state : Bool @@ -129,9 +129,9 @@ def _set_bundler_state_python(state, module, user, sys_prefix, logger=None): def enable_bundler_python(module, user=True, sys_prefix=False, logger=None): """Enables bundlers defined in a Python package. - + Returns whether each bundle defined in the packaged was enabled or not. - + Parameters ---------- module : str @@ -147,12 +147,12 @@ def enable_bundler_python(module, user=True, sys_prefix=False, logger=None): """ return _set_bundler_state_python(True, module, user, sys_prefix, logger=logger) - + def disable_bundler_python(module, user=True, sys_prefix=False, logger=None): """Disables bundlers defined in a Python package. - + Returns whether each bundle defined in the packaged was enabled or not. - + Parameters ---------- module : str @@ -176,13 +176,13 @@ class ToggleBundlerExtensionApp(BaseExtensionApp): description = "Enable/disable a bundlerextension in configuration." user = Bool(True, config=True, help="Apply the configuration only for the current user (default)") - + _toggle_value = None - + def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def toggle_bundler_python(self, module): """Toggle some extensions in an importable Python module. @@ -210,25 +210,25 @@ def start(self): if self.python: self.toggle_bundler_python(self.extra_args[0]) else: - raise NotImplementedError('Cannot install bundlers from non-Python packages') + raise NotImplementedError('Cannot install bundlers from non-Python packages') class EnableBundlerExtensionApp(ToggleBundlerExtensionApp): """An App that enables bundlerextensions""" name = "jupyter bundlerextension enable" description = """ Enable a bundlerextension in frontend configuration. 
- + Usage jupyter bundlerextension enable [--system|--sys-prefix] """ _toggle_value = True - + class DisableBundlerExtensionApp(ToggleBundlerExtensionApp): """An App that disables bundlerextensions""" name = "jupyter bundlerextension disable" description = """ Disable a bundlerextension in frontend configuration. - + Usage jupyter bundlerextension disable [--system|--sys-prefix] """ @@ -240,15 +240,15 @@ class ListBundlerExtensionApp(BaseExtensionApp): name = "jupyter nbextension list" version = __version__ description = "List all nbextensions known by the configuration system" - + def list_nbextensions(self): """List all the nbextensions""" config_dirs = [os.path.join(p, 'nbconfig') for p in jupyter_config_path()] - + print("Known bundlerextensions:") - + for config_dir in config_dirs: - head = u' config dir: {}'.format(config_dir) + head = f' config dir: {config_dir}' head_shown = False cm = BaseJSONConfigManager(parent=self, config_dir=config_dir) @@ -258,18 +258,16 @@ def list_nbextensions(self): # only show heading if there is an nbextension here print(head) head_shown = True - + for bundler_id, info in data['bundlerextensions'].items(): label = info.get('label') module = info.get('module_name') if label is None or module is None: - msg = u' {} {}'.format(bundler_id, RED_DISABLED) + msg = f' {bundler_id} {RED_DISABLED}' else: - msg = u' "{}" from {} {}'.format( - label, module, GREEN_ENABLED - ) + msg = f' "{label}" from {module} {GREEN_ENABLED}' print(msg) - + def start(self): """Perform the App's functions as configured""" self.list_nbextensions() diff --git a/notebook/bundler/tarball_bundler.py b/notebook/bundler/tarball_bundler.py index c112841482..74861e7a78 100644 --- a/notebook/bundler/tarball_bundler.py +++ b/notebook/bundler/tarball_bundler.py @@ -20,7 +20,7 @@ def _jupyter_bundlerextension_paths(): def bundle(handler, model): """Create a compressed tarball containing the notebook document. - + Parameters ---------- handler : tornado.web.RequestHandler @@ -31,8 +31,8 @@ def bundle(handler, model): notebook_filename = model['name'] notebook_content = nbformat.writes(model['content']).encode('utf-8') notebook_name = os.path.splitext(notebook_filename)[0] - tar_filename = '{}.tar.gz'.format(notebook_name) - + tar_filename = f'{notebook_name}.tar.gz' + info = tarfile.TarInfo(notebook_filename) info.size = len(notebook_content) diff --git a/notebook/bundler/tests/test_bundler_api.py b/notebook/bundler/tests/test_bundler_api.py index 6c251e5e99..0192160241 100644 --- a/notebook/bundler/tests/test_bundler_api.py +++ b/notebook/bundler/tests/test_bundler_api.py @@ -3,7 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import io from os.path import join as pjoin from notebook.tests.launchnotebook import NotebookTestBase @@ -14,7 +13,7 @@ from unittest.mock import patch - + def bundle(handler, model): """Bundler test stub. Echo the notebook path.""" handler.finish(model['path']) @@ -25,17 +24,17 @@ class BundleAPITest(NotebookTestBase): def setup_class(cls): """Make a test notebook. Borrowed from nbconvert test. 
Assumes the class teardown will clean it up in the end.""" - super(BundleAPITest, cls).setup_class() + super().setup_class() nbdir = cls.notebook_dir nb = new_notebook() - nb.cells.append(new_markdown_cell(u'Created by test')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) + nb.cells.append(new_markdown_cell('Created by test')) + cc1 = new_code_cell(source='print(2*6)') + cc1.outputs.append(new_output(output_type="stream", text='12')) nb.cells.append(cc1) - - with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w', + + with open(pjoin(nbdir, 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) @@ -68,7 +67,7 @@ def test_bundler_import_error(self): mock.assert_called_with('fake_bundler') self.assertEqual(resp.status_code, 500) self.assertIn('Could not import bundler fake_bundler', resp.text) - + def test_bundler_invoke(self): """Should respond with 200 and output from test bundler stub""" with patch('notebook.bundler.handlers.BundlerHandler.get_bundler') as mock: diff --git a/notebook/config_manager.py b/notebook/config_manager.py index 137cbd691c..a8e4ee26b5 100644 --- a/notebook/config_manager.py +++ b/notebook/config_manager.py @@ -5,7 +5,6 @@ import errno import glob -import io import json import os import copy @@ -97,7 +96,7 @@ def get(self, section_name, include_root=True): data = {} for path in paths: if os.path.isfile(path): - with io.open(path, encoding='utf-8') as f: + with open(path, encoding='utf-8') as f: recursive_update(data, json.load(f)) return data @@ -117,7 +116,7 @@ def set(self, section_name, data): # in order to avoid writing half-finished corrupted data to disk. json_content = json.dumps(data, indent=2) - f = io.open(filename, 'w', encoding='utf-8') + f = open(filename, 'w', encoding='utf-8') with f: f.write(json_content) diff --git a/notebook/edit/handlers.py b/notebook/edit/handlers.py index 9ed9a9c380..ebd1a5ab2a 100644 --- a/notebook/edit/handlers.py +++ b/notebook/edit/handlers.py @@ -13,7 +13,7 @@ class EditorHandler(IPythonHandler): def get(self, path): path = path.strip('/') if not self.contents_manager.file_exists(path): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, f'File does not exist: {path}') basename = path.rsplit('/', 1)[-1] self.write(self.render_template('edit.html', @@ -25,4 +25,4 @@ def get(self, path): default_handlers = [ (r"/edit%s" % path_regex, EditorHandler), -] \ No newline at end of file +] diff --git a/notebook/extensions.py b/notebook/extensions.py index 8efa65724b..882ca4cfb4 100644 --- a/notebook/extensions.py +++ b/notebook/extensions.py @@ -65,7 +65,11 @@ class BaseExtensionApp(JupyterApp): def _verbose_changed(self): """Warn about verbosity changes""" import warnings - warnings.warn("`verbose` traits of `{}` has been deprecated, has no effects and will be removed in notebook 5.0.".format(type(self).__name__), DeprecationWarning) + warnings.warn( + f"`verbose` traits of `{type(self).__name__}` has been deprecated, " + f"has no effects and will be removed in notebook 5.0.", + DeprecationWarning + ) def _log_format_default(self): """A default format for messages""" diff --git a/notebook/gateway/handlers.py b/notebook/gateway/handlers.py index d774ba39e2..f65597efd9 100644 --- a/notebook/gateway/handlers.py +++ b/notebook/gateway/handlers.py @@ -101,7 +101,7 @@ def write_message(self, message, binary=False): super().write_message(message, binary=binary) elif self.log.isEnabledFor(logging.DEBUG): msg_summary = 
WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) - self.log.debug("Notebook client closed websocket connection - message dropped: {}".format(msg_summary)) + self.log.debug(f"Notebook client closed websocket connection - message dropped: {msg_summary}") def on_close(self): self.log.debug("Closing websocket connection %s", self.request.path) @@ -112,7 +112,7 @@ def on_close(self): def _get_message_summary(message): summary = [] message_type = message['msg_type'] - summary.append('type: {}'.format(message_type)) + summary.append(f'type: {message_type}') if message_type == 'status': summary.append(', state: {}'.format(message['content']['execution_state'])) @@ -146,7 +146,7 @@ def _connect(self, kernel_id): GatewayClient.instance().ws_url, GatewayClient.instance().kernels_endpoint, url_escape(kernel_id), 'channels' ) - self.log.info('Connecting to {}'.format(ws_url)) + self.log.info(f'Connecting to {ws_url}') kwargs = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) @@ -158,7 +158,7 @@ def _connection_done(self, fut): if not self.disconnected and fut.exception() is None: # prevent concurrent.futures._base.CancelledError self.ws = fut.result() self.retry = 0 - self.log.debug("Connection is ready: ws: {}".format(self.ws)) + self.log.debug(f"Connection is ready: ws: {self.ws}") else: self.log.warning("Websocket connection has been closed via client disconnect or due to error. " "Kernel with ID '{}' may not be terminated on GatewayClient: {}". @@ -172,7 +172,7 @@ def _disconnect(self): elif not self.ws_future.done(): # Cancel pending connection. Since future.cancel() is a noop on tornado, we'll track cancellation locally self.ws_future.cancel() - self.log.debug("_disconnect: future cancelled, disconnected: {}".format(self.disconnected)) + self.log.debug(f"_disconnect: future cancelled, disconnected: {self.disconnected}") @gen.coroutine def _read_messages(self, callback): @@ -183,10 +183,10 @@ def _read_messages(self, callback): try: message = yield self.ws.read_message() except Exception as e: - self.log.error("Exception reading message from websocket: {}".format(e)) # , exc_info=True) + self.log.error(f"Exception reading message from websocket: {e}") # , exc_info=True) if message is None: if not self.disconnected: - self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id)) + self.log.warning(f"Lost connection to Gateway: {self.kernel_id}") break callback(message) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) else: # ws cancelled - stop reading @@ -231,7 +231,7 @@ def _write_message(self, message): if not self.disconnected and self.ws is not None: self.ws.write_message(message) except Exception as e: - self.log.error("Exception writing message to websocket: {}".format(e)) # , exc_info=True) + self.log.error(f"Exception writing message to websocket: {e}") # , exc_info=True) def on_close(self): """Web socket closed event.""" diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index b2ae3b30a2..d8422b1c8b 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -276,7 +276,7 @@ def init_static_args(self): self._static_args['headers'] = json.loads(self.headers) if 'Authorization' not in self._static_args['headers'].keys(): self._static_args['headers'].update({ - 'Authorization': 'token {}'.format(self.auth_token) + 'Authorization': f'token {self.auth_token}' }) self._static_args['connect_timeout'] = self.connect_timeout self._static_args['request_timeout'] = 
self.request_timeout @@ -640,7 +640,7 @@ async def get_kernel_spec_resource(self, kernel_name, path): The name of the desired resource """ kernel_spec_resource_url = url_path_join(self.base_resource_endpoint, str(kernel_name), str(path)) - self.log.debug("Request kernel spec resource '{}' at: {}".format(path, kernel_spec_resource_url)) + self.log.debug(f"Request kernel spec resource '{path}' at: {kernel_spec_resource_url}") try: response = await gateway_request(kernel_spec_resource_url, method='GET') except web.HTTPError as error: diff --git a/notebook/i18n/__init__.py b/notebook/i18n/__init__.py index 83f65c5fd9..0f836d2eeb 100644 --- a/notebook/i18n/__init__.py +++ b/notebook/i18n/__init__.py @@ -2,7 +2,6 @@ """ from collections import defaultdict import errno -import io import json from os.path import dirname, join as pjoin import re @@ -58,9 +57,9 @@ def parse_accept_lang_header(accept_lang): def load(language, domain='nbjs'): """Load translations from an nbjs.json file""" try: - f = io.open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), + f = open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), encoding='utf-8') - except IOError as e: + except OSError as e: if e.errno != errno.ENOENT: raise return {} diff --git a/notebook/jstest.py b/notebook/jstest.py index 2bb318af31..fc50fcf481 100644 --- a/notebook/jstest.py +++ b/notebook/jstest.py @@ -60,7 +60,7 @@ def run(self): self.buffer.write(chunk) if self.echo: sys.stdout.write(bytes_to_str(chunk)) - + os.close(self.readfd) os.close(self.writefd) @@ -87,7 +87,7 @@ def halt(self): self.join() -class TestController(object): +class TestController: """Run tests in a subprocess """ #: str, test group to be executed. @@ -110,7 +110,7 @@ def __init__(self): def setup(self): """Create temporary directories etc. - + This is only called when we know the test group will be run. Things created here may be cleaned up by self.cleanup(). """ @@ -138,11 +138,11 @@ def wait(self): def print_extra_info(self): """Print extra information about this test run. - + If we're running in parallel and showing the concise view, this is only called if the test group fails. Otherwise, it's called before the test group is started. - + The base implementation does nothing, but it can be overridden by subclasses. 
""" @@ -193,7 +193,7 @@ def all_js_groups(): class JSController(TestController): """Run CasperJS tests """ - + requirements = ['casperjs'] def __init__(self, section, xunit=True, engine='phantomjs', url=None): @@ -226,8 +226,8 @@ def setup(self): self.dirs.append(self.home) self.dirs.append(self.config_dir) self.dirs.append(self.nbdir) - os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a'))) - os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b'))) + os.makedirs(os.path.join(self.nbdir.name, os.path.join('sub ∂ir1', 'sub ∂ir 1a'))) + os.makedirs(os.path.join(self.nbdir.name, os.path.join('sub ∂ir2', 'sub ∂ir 1b'))) if self.xunit: self.add_xunit() @@ -317,7 +317,7 @@ def _init_server(self): 'nbserver-%i.json' % self.server.pid ) self._wait_for_server() - + def _wait_for_server(self): """Wait 30 seconds for the notebook server to start""" for i in range(300): @@ -336,14 +336,14 @@ def _wait_for_server(self): print("Notebook server-info file never arrived: %s" % self.server_info_file, file=sys.stderr ) - + def _failed_to_start(self): """Notebook server exited prematurely""" captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') print("Notebook failed to start: ", file=sys.stderr) print(self.server_command) print(captured, file=sys.stderr) - + def _load_server_info(self): """Notebook server started, load connection info from JSON""" with open(self.server_info_file) as f: @@ -377,7 +377,7 @@ def cleanup(self): print("Notebook server still running (%s)" % self.server_info_file, file=sys.stderr ) - + self.stream_capturer.halt() TestController.cleanup(self) @@ -399,11 +399,11 @@ def prepare_controllers(options): def do_run(controller, buffer_output=True): """Setup and run a test controller. - + If buffer_output is True, no output is displayed, to avoid it appearing interleaved. In this case, the caller is responsible for displaying test output on failure. - + Returns ------- controller : TestController @@ -468,7 +468,7 @@ def _add(name, value): def run_jstestall(options): """Run the entire Javascript test suite. - + This function constructs TestControllers and runs them in subprocesses. Parameters diff --git a/notebook/kernelspecs/handlers.py b/notebook/kernelspecs/handlers.py index be768b5ab7..2ec046dd64 100644 --- a/notebook/kernelspecs/handlers.py +++ b/notebook/kernelspecs/handlers.py @@ -15,7 +15,7 @@ def get(self, kernel_name, path, include_body=True): self.root = ksm.get_kernel_spec(kernel_name).resource_dir except KeyError as e: raise web.HTTPError(404, - u'Kernel spec %s not found' % kernel_name) from e + f'Kernel spec {kernel_name} not found') from e self.log.debug("Serving kernel resource from: %s", self.root) return web.StaticFileHandler.get(self, path, include_body=include_body) @@ -25,4 +25,4 @@ def head(self, kernel_name, path): default_handlers = [ (r"/kernelspecs/%s/(?P.*)" % kernel_name_regex, KernelSpecResourceHandler), -] \ No newline at end of file +] diff --git a/notebook/nbconvert/handlers.py b/notebook/nbconvert/handlers.py index 24a3f093fb..35732bfa76 100644 --- a/notebook/nbconvert/handlers.py +++ b/notebook/nbconvert/handlers.py @@ -67,7 +67,7 @@ def get_exporter(format, **kwargs): Exporter = get_exporter(format) except KeyError as e: # should this be 400? 
- raise web.HTTPError(404, u"No exporter for format: %s" % format) from e + raise web.HTTPError(404, f"No exporter for format: {format}") from e try: return Exporter(**kwargs) diff --git a/notebook/nbconvert/tests/test_nbconvert_handlers.py b/notebook/nbconvert/tests/test_nbconvert_handlers.py index e5af13c0c4..93b83738c1 100644 --- a/notebook/nbconvert/tests/test_nbconvert_handlers.py +++ b/notebook/nbconvert/tests/test_nbconvert_handlers.py @@ -1,10 +1,8 @@ -import io import json import os from os.path import join as pjoin import shutil -import requests import pytest from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error @@ -13,8 +11,6 @@ new_notebook, new_markdown_cell, new_code_cell, new_output, ) -from ipython_genutils.testing.decorators import onlyif_cmds_exist - from base64 import encodebytes @@ -25,7 +21,7 @@ def cmd_exists(cmd): return True -class NbconvertAPI(object): +class NbconvertAPI: """Wrapper for nbconvert API calls.""" def __init__(self, request): self.request = request @@ -72,16 +68,16 @@ def cleanup_dir(): nb = new_notebook() - nb.cells.append(new_markdown_cell(u'Created by test ³')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) + nb.cells.append(new_markdown_cell('Created by test ³')) + cc1 = new_code_cell(source='print(2*6)') + cc1.outputs.append(new_output(output_type="stream", text='12')) cc1.outputs.append(new_output(output_type="execute_result", data={'image/png' : png_green_pixel}, execution_count=1, )) nb.cells.append(cc1) - with io.open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w', + with open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) @@ -94,13 +90,13 @@ def cleanup_dir(): def test_from_file(self): r = self.nbconvert_api.from_file('html', 'foo', 'testnb.ipynb') self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) + self.assertIn('text/html', r.headers['Content-Type']) + self.assertIn('Created by test', r.text) + self.assertIn('print', r.text) r = self.nbconvert_api.from_file('python', 'foo', 'testnb.ipynb') - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) + self.assertIn('text/x-python', r.headers['Content-Type']) + self.assertIn('print(2*6)', r.text) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -126,8 +122,8 @@ def test_from_file_download(self): ) def test_from_file_zip(self): r = self.nbconvert_api.from_file('latex', 'foo', 'testnb.ipynb', download=True) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) + self.assertIn('application/zip', r.headers['Content-Type']) + self.assertIn('.zip', r.headers['Content-Disposition']) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -138,13 +134,13 @@ def test_from_post(self): r = self.nbconvert_api.from_post(format='html', nbmodel=nbmodel) self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) + self.assertIn('text/html', r.headers['Content-Type']) + self.assertIn('Created by test', r.text) + self.assertIn('print', r.text) r = self.nbconvert_api.from_post(format='python', nbmodel=nbmodel) - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) + 
self.assertIn('text/x-python', r.headers['Content-Type']) + self.assertIn('print(2*6)', r.text) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -154,5 +150,5 @@ def test_from_post_zip(self): nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() r = self.nbconvert_api.from_post(format='latex', nbmodel=nbmodel) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) + self.assertIn('application/zip', r.headers['Content-Type']) + self.assertIn('.zip', r.headers['Content-Disposition']) diff --git a/notebook/nbextensions.py b/notebook/nbextensions.py index aac752cce9..98c0216693 100644 --- a/notebook/nbextensions.py +++ b/notebook/nbextensions.py @@ -35,7 +35,7 @@ def check_nbextension(files, user=False, prefix=None, nbextensions_dir=None, sys_prefix=False): """Check whether nbextension files have been installed - + Returns True if all files are found, False if any are missing. Parameters @@ -58,11 +58,11 @@ def check_nbextension(files, user=False, prefix=None, nbextensions_dir=None, sys # make sure nbextensions dir exists if not os.path.exists(nbext): return False - + if isinstance(files, string_types): # one file given, turn it into a list files = [files] - + return all(os.path.exists(pjoin(nbext, f)) for f in files) @@ -72,14 +72,14 @@ def install_nbextension(path, overwrite=False, symlink=False, logger=None, sys_prefix=False ): """Install a Javascript extension for the notebook - + Stages files and/or directories into the nbextensions directory. By default, this compares modification time, and only stages files that need updating. If `overwrite` is specified, matching files are purged before proceeding. - + Parameters ---------- - + path : path to file, directory, zip or tarball archive, or URL to install By default, the file will be installed with its base name, so '/path/to/foo' will install to 'nbextensions/foo'. See the destination argument below to change this. @@ -116,14 +116,14 @@ def install_nbextension(path, overwrite=False, symlink=False, nbext = _get_nbextension_dir(user=user, sys_prefix=sys_prefix, prefix=prefix, nbextensions_dir=nbextensions_dir) # make sure nbextensions dir exists ensure_dir_exists(nbext) - + # forcing symlink parameter to False if os.symlink does not exist (e.g., on Windows machines running python 2) if not hasattr(os, 'symlink'): symlink = False - + if isinstance(path, (list, tuple)): raise TypeError("path must be a string pointing to a single extension to install; call this function multiple times to install multiple extensions") - + path = cast_unicode_py2(path) if path.startswith(('https://', 'http://')): @@ -222,16 +222,16 @@ def install_nbextension_python(module, overwrite=False, symlink=False, return full_dests -def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, prefix=None, +def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, prefix=None, nbextensions_dir=None, logger=None): """Uninstall a Javascript extension of the notebook - - Removes staged files and/or directories in the nbextensions directory and + + Removes staged files and/or directories in the nbextensions directory and removes the extension from the frontend config. - + Parameters ---------- - + dest : str path to file, directory, zip or tarball archive, or URL to install name the nbextension is installed to. 
For example, if destination is 'foo', then @@ -261,7 +261,7 @@ def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, pref shutil.rmtree(full_dest) else: os.remove(full_dest) - + # Look through all of the config sections making sure that the nbextension # doesn't exist. config_dir = os.path.join(_get_config_dir(user=user, sys_prefix=sys_prefix), 'nbconfig') @@ -295,7 +295,7 @@ def uninstall_nbextension_python(module, user=False, sys_prefix=False, prefix=None, nbextensions_dir=None, logger=None): """Uninstall an nbextension bundled in a Python package. - + See parameters of `install_nbextension_python` """ m, nbexts = _get_nbextension_metadata(module) @@ -303,8 +303,8 @@ def uninstall_nbextension_python(module, dest = nbext['dest'] require = nbext['require'] if logger: - logger.info("Uninstalling {} {}".format(dest, require)) - uninstall_nbextension(dest, require, user=user, sys_prefix=sys_prefix, + logger.info(f"Uninstalling {dest} {require}") + uninstall_nbextension(dest, require, user=user, sys_prefix=sys_prefix, prefix=prefix, nbextensions_dir=nbextensions_dir, logger=logger) @@ -408,7 +408,7 @@ def enable_nbextension(section, require, user=True, sys_prefix=False, def disable_nbextension(section, require, user=True, sys_prefix=False, logger=None): """Disable a named nbextension - + Returns True if the final state is the one requested. Parameters @@ -478,7 +478,7 @@ def enable_nbextension_python(module, user=True, sys_prefix=False, def disable_nbextension_python(module, user=True, sys_prefix=False, logger=None): """Disable some nbextensions associated with a Python module. - + Returns True if the final state is the one requested. Parameters @@ -516,27 +516,27 @@ def validate_nbextension(require, logger=None): js_exists = False for exts in jupyter_path('nbextensions'): # Does the Javascript entrypoint actually exist on disk? - js = u"{}.js".format(os.path.join(exts, *require.split("/"))) + js = f"{os.path.join(exts, *require.split('/'))}.js" js_exists = os.path.exists(js) if js_exists: break - require_tmpl = u" - require? {} {}" + require_tmpl = " - require? 
{} {}" if js_exists: infos.append(require_tmpl.format(GREEN_OK, require)) else: warnings.append(require_tmpl.format(RED_X, require)) - + if logger: if warnings: - logger.warning(u" - Validating: problems found:") + logger.warning(" - Validating: problems found:") for msg in warnings: logger.warning(msg) for msg in infos: logger.info(msg) else: - logger.info(u" - Validating: {}".format(GREEN_OK)) - + logger.info(f" - Validating: {GREEN_OK}") + return warnings @@ -567,19 +567,19 @@ def validate_nbextension_python(spec, full_dest, logger=None): section = spec.get("section", None) if section in NBCONFIG_SECTIONS: - infos.append(u" {} section: {}".format(GREEN_OK, section)) + infos.append(f" {GREEN_OK} section: {section}") else: - warnings.append(u" {} section: {}".format(RED_X, section)) + warnings.append(f" {RED_X} section: {section}") require = spec.get("require", None) if require is not None: require_path = os.path.join( full_dest[0:-len(spec["dest"])], - u"{}.js".format(require)) + f"{require}.js") if os.path.exists(require_path): - infos.append(u" {} require: {}".format(GREEN_OK, require_path)) + infos.append(f" {GREEN_OK} require: {require_path}") else: - warnings.append(u" {} require: {}".format(RED_X, require_path)) + warnings.append(f" {RED_X} require: {require_path}") if logger: if warnings: @@ -588,9 +588,9 @@ def validate_nbextension_python(spec, full_dest, logger=None): logger.warning(msg) for msg in infos: logger.info(msg) - logger.warning(u"Full spec: {}".format(spec)) + logger.warning(f"Full spec: {spec}") else: - logger.info(u"- Validating: {}".format(GREEN_OK)) + logger.info(f"- Validating: {GREEN_OK}") return warnings @@ -633,24 +633,24 @@ def validate_nbextension_python(spec, full_dest, logger=None): class InstallNBExtensionApp(BaseExtensionApp): """Entry point for installing notebook extensions""" description = """Install Jupyter notebook extensions - + Usage - + jupyter nbextension install path|url [--user|--sys-prefix] - + This copies a file or a folder into the Jupyter nbextensions directory. If a URL is given, it will be downloaded. If an archive is given, it will be extracted into nbextensions. If the requested files are already up to date, no action is taken unless --overwrite is specified. 
""" - + examples = """ jupyter nbextension install /path/to/myextension """ aliases = aliases flags = flags - + overwrite = Bool(False, config=True, help="Force overwrite of existing files") symlink = Bool(False, config=True, help="Create symlinks instead of copying files") @@ -662,7 +662,7 @@ class InstallNBExtensionApp(BaseExtensionApp): def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def install_extensions(self): """Perform the installation of nbextension(s)""" if len(self.extra_args)>1: @@ -675,7 +675,7 @@ def install_extensions(self): else: install = install_nbextension kwargs = {'destination': self.destination} - + full_dests = install(self.extra_args[0], overwrite=self.overwrite, symlink=self.symlink, @@ -689,7 +689,7 @@ def install_extensions(self): if full_dests: self.log.info( - u"\nTo initialize this nbextension in the browser every time" + "\nTo initialize this nbextension in the browser every time" " the notebook (or other app) loads:\n\n" " jupyter nbextension enable {}{}{}{}\n".format( self.extra_args[0] if self.python else "", @@ -714,9 +714,9 @@ class UninstallNBExtensionApp(BaseExtensionApp): """Entry point for uninstalling notebook extensions""" version = __version__ description = """Uninstall Jupyter notebook extensions - + Usage - + jupyter nbextension uninstall path/url path/url/entrypoint jupyter nbextension uninstall --py pythonPackageName @@ -734,12 +734,12 @@ class UninstallNBExtensionApp(BaseExtensionApp): It will uninstall nbextensions listed in that module, but not the module itself (which you should uninstall using a package manager such as pip). """ - + examples = """ jupyter nbextension uninstall dest/dir dest/dir/extensionjs jupyter nbextension uninstall --py extensionPyPackage """ - + aliases = { "prefix" : "UninstallNBExtensionApp.prefix", "nbextensions" : "UninstallNBExtensionApp.nbextensions_dir", @@ -748,7 +748,7 @@ class UninstallNBExtensionApp(BaseExtensionApp): flags = BaseExtensionApp.flags.copy() flags['system'] = ({'UninstallNBExtensionApp': {'system': True}}, "Uninstall specifically from systemwide installation directory") - + prefix = Unicode('', config=True, help="Installation prefix. Overrides --user, --sys-prefix and --system" ) @@ -759,7 +759,7 @@ class UninstallNBExtensionApp(BaseExtensionApp): system = Bool(False, config=True, help="Uninstall specifically from systemwide installation directory" ) - + def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' @@ -837,13 +837,13 @@ class ToggleNBExtensionApp(BaseExtensionApp): user = Bool(True, config=True, help="Apply the configuration only for the current user (default)") aliases = {'section': 'ToggleNBExtensionApp.section'} - + _toggle_value = None def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def toggle_nbextension_python(self, module): """Toggle some extensions in an importable Python module. @@ -878,7 +878,7 @@ def toggle_nbextension(self, require): return toggle(self.section, require, user=self.user, sys_prefix=self.sys_prefix, logger=self.log) - + def start(self): if not self.extra_args: sys.exit('Please specify an nbextension/package to enable or disable') @@ -895,7 +895,7 @@ class EnableNBExtensionApp(ToggleNBExtensionApp): name = "jupyter nbextension enable" description = """ Enable an nbextension in frontend configuration. 
- + Usage jupyter nbextension enable [--system|--sys-prefix] """ @@ -907,7 +907,7 @@ class DisableNBExtensionApp(ToggleNBExtensionApp): name = "jupyter nbextension disable" description = """ Disable an nbextension in frontend configuration. - + Usage jupyter nbextension disable [--system|--sys-prefix] """ @@ -919,15 +919,15 @@ class ListNBExtensionsApp(BaseExtensionApp): name = "jupyter nbextension list" version = __version__ description = "List all nbextensions known by the configuration system" - + def list_nbextensions(self): """List all the nbextensions""" config_dirs = [os.path.join(p, 'nbconfig') for p in jupyter_config_path()] - + print("Known nbextensions:") - + for config_dir in config_dirs: - head = u' config dir: {}'.format(config_dir) + head = f' config dir: {config_dir}' head_shown = False cm = BaseJSONConfigManager(parent=self, config_dir=config_dir) @@ -938,15 +938,13 @@ def list_nbextensions(self): # only show heading if there is an nbextension here print(head) head_shown = True - print(u' {} section'.format(section)) - + print(f' {section} section') + for require, enabled in data['load_extensions'].items(): - print(u' {} {}'.format( - require, - GREEN_ENABLED if enabled else RED_DISABLED)) + print(f' {require} {GREEN_ENABLED if enabled else RED_DISABLED}') if enabled: validate_nbextension(require, logger=self.log) - + def start(self): """Perform the App's functions as configured""" self.list_nbextensions() @@ -1051,7 +1049,7 @@ def _safe_is_tarfile(path): """ try: return tarfile.is_tarfile(path) - except IOError: + except OSError: return False @@ -1076,15 +1074,15 @@ def _get_nbextension_dir(user=False, sys_prefix=False, prefix=None, nbextensions ('nbextensions_dir', nbextensions_dir), ('sys_prefix', sys_prefix), ] - conflicting_set = ['{}={!r}'.format(n, v) for n, v in conflicting if v] + conflicting_set = [f'{n}={v!r}' for n, v in conflicting if v] if len(conflicting_set) > 1: raise ArgumentConflict( "cannot specify more than one of user, sys_prefix, prefix, or nbextensions_dir, but got: {}" .format(', '.join(conflicting_set))) if user: - nbext = pjoin(jupyter_data_dir(), u'nbextensions') + nbext = pjoin(jupyter_data_dir(), 'nbextensions') elif sys_prefix: - nbext = pjoin(ENV_JUPYTER_PATH[0], u'nbextensions') + nbext = pjoin(ENV_JUPYTER_PATH[0], 'nbextensions') elif prefix: nbext = pjoin(prefix, 'share', 'jupyter', 'nbextensions') elif nbextensions_dir: diff --git a/notebook/notebook/handlers.py b/notebook/notebook/handlers.py index 638931d6be..d2a7ef4e23 100644 --- a/notebook/notebook/handlers.py +++ b/notebook/notebook/handlers.py @@ -52,7 +52,7 @@ def get_frontend_exporters(): # Ensure default_exporters are in frontend_exporters if not already # This protects against nbconvert versions lower than 5.5 - names = set(exporter.name.lower() for exporter in frontend_exporters) + names = {exporter.name.lower() for exporter in frontend_exporters} for exporter in default_exporters: if exporter.name not in names: frontend_exporters.append(exporter) @@ -74,11 +74,11 @@ class NotebookHandler(IPythonHandler): @web.authenticated @gen.coroutine def get(self, path): - """get renders the notebook template if a name is given, or + """get renders the notebook template if a name is given, or redirects to the '/files/' handler if the name is not given.""" path = path.strip('/') cm = self.contents_manager - + # will raise 404 on not found try: model = yield maybe_future(cm.get(path, content=False)) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index e07bfcf507..4a7464cd0a 
100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -14,7 +14,6 @@ import hmac import importlib import inspect -import io import ipaddress import json import logging @@ -115,7 +114,6 @@ url_path_join, urldecode_unix_socket_path, urlencode_unix_socket, - urlencode_unix_socket_path, urljoin, ) from .traittypes import TypeFromClasses @@ -507,7 +505,7 @@ class NbserverStopApp(JupyterApp): port = Integer(DEFAULT_NOTEBOOK_PORT, config=True, help="Port of the server to be killed. Default %s" % DEFAULT_NOTEBOOK_PORT) - sock = Unicode(u'', config=True, + sock = Unicode('', config=True, help="UNIX socket of the server to be killed.") def parse_command_line(self, argv=None): @@ -539,7 +537,7 @@ def _shutdown_or_exit(self, target_endpoint, server): def _maybe_remove_unix_socket(socket_path): try: os.unlink(socket_path) - except (OSError, IOError): + except OSError: pass def start(self): @@ -563,7 +561,7 @@ def start(self): else: current_endpoint = self.sock or self.port print( - "There is currently no server running on {}".format(current_endpoint), + f"There is currently no server running on {current_endpoint}", file=sys.stderr ) print("Ports/sockets currently in use:", file=sys.stderr) @@ -753,7 +751,7 @@ def _default_log_datefmt(self): @default('log_format') def _default_log_format(self): """override default log format to include time""" - return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" + return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" ignore_minified_js = Bool(False, config=True, @@ -834,7 +832,7 @@ def _default_ip(self): s = socket.socket() try: s.bind(('localhost', 0)) - except socket.error as e: + except OSError as e: self.log.warning(_("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e) return '127.0.0.1' else: @@ -844,11 +842,11 @@ def _default_ip(self): @validate('ip') def _validate_ip(self, proposal): value = proposal['value'] - if value == u'*': - value = u'' + if value == '*': + value = '' return value - custom_display_url = Unicode(u'', config=True, + custom_display_url = Unicode('', config=True, help=_("""Override URL shown to users. 
Replace actual URL, including protocol, address, port and base URL, @@ -883,7 +881,7 @@ def port_retries_default(self): return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) - sock = Unicode(u'', config=True, + sock = Unicode('', config=True, help=_("The UNIX socket the notebook server will listen on.") ) @@ -914,15 +912,15 @@ def _validate_sock_mode(self, proposal): return value - certfile = Unicode(u'', config=True, + certfile = Unicode('', config=True, help=_("""The full path to an SSL/TLS certificate file.""") ) - keyfile = Unicode(u'', config=True, + keyfile = Unicode('', config=True, help=_("""The full path to a private key file for usage with SSL/TLS.""") ) - client_ca = Unicode(u'', config=True, + client_ca = Unicode('', config=True, help=_("""The full path to a certificate authority certificate for SSL/TLS client authentication.""") ) @@ -947,7 +945,7 @@ def _default_cookie_secret_file(self): @default('cookie_secret') def _default_cookie_secret(self): if os.path.exists(self.cookie_secret_file): - with io.open(self.cookie_secret_file, 'rb') as f: + with open(self.cookie_secret_file, 'rb') as f: key = f.read() else: key = encodebytes(os.urandom(32)) @@ -960,7 +958,7 @@ def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info(_("Writing notebook server cookie secret to %s"), self.cookie_secret_file) try: - with io.open(self.cookie_secret_file, 'wb') as f: + with open(self.cookie_secret_file, 'wb') as f: f.write(secret) except OSError as e: self.log.error(_("Failed to write cookie secret to %s: %s"), @@ -995,12 +993,12 @@ def _token_default(self): return os.getenv('JUPYTER_TOKEN') if os.getenv('JUPYTER_TOKEN_FILE'): self._token_generated = False - with io.open(os.getenv('JUPYTER_TOKEN_FILE'), "r") as token_file: + with open(os.getenv('JUPYTER_TOKEN_FILE')) as token_file: return token_file.read() if self.password: # no token if password is enabled self._token_generated = False - return u'' + return '' else: self._token_generated = True return binascii.hexlify(os.urandom(24)).decode('ascii') @@ -1051,7 +1049,7 @@ def _default_min_open_files_limit(self): def _token_changed(self, change): self._token_generated = False - password = Unicode(u'', config=True, + password = Unicode('', config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: @@ -1166,7 +1164,7 @@ def _default_allow_remote(self): (NotebookApp.browser) configuration option. """) - browser = Unicode(u'', config=True, + browser = Unicode('', config=True, help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` @@ -1252,7 +1250,7 @@ def _update_webapp_settings(self, change): def _update_enable_mathjax(self, change): """set mathjax url to empty if mathjax is disabled""" if not change['new']: - self.mathjax_url = u'' + self.mathjax_url = '' base_url = Unicode('/', config=True, help='''The base URL for the notebook server. 
@@ -1351,7 +1349,7 @@ def nbextensions_path(self): @default('mathjax_url') def _default_mathjax_url(self): if not self.enable_mathjax: - return u'' + return '' static_url_prefix = self.tornado_settings.get("static_url_prefix", "static") return url_path_join(static_url_prefix, 'components', 'MathJax', 'MathJax.js') @@ -1360,7 +1358,7 @@ def _update_mathjax_url(self, change): new = change['new'] if new and not self.enable_mathjax: # enable_mathjax=False overrides mathjax_url - self.mathjax_url = u'' + self.mathjax_url = '' else: self.log.info(_("Using MathJax: %s"), new) @@ -1701,7 +1699,7 @@ def init_resources(self): if hard < soft: hard = soft self.log.debug( - 'Raising open file limit: soft {}->{}; hard {}->{}'.format(old_soft, soft, old_hard, hard) + f'Raising open file limit: soft {old_soft}->{soft}; hard {old_hard}->{hard}' ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) @@ -1803,7 +1801,7 @@ def _bind_http_server_unix(self): try: sock = bind_unix_socket(self.sock, mode=int(self.sock_mode.encode(), 8)) self.http_server.add_socket(sock) - except socket.error as e: + except OSError as e: if e.errno == errno.EADDRINUSE: self.log.warning(_('The socket %s is already in use.') % self.sock) return False @@ -1820,7 +1818,7 @@ def _bind_http_server_tcp(self): for port in random_ports(self.port, self.port_retries+1): try: self.http_server.listen(port, self.ip) - except socket.error as e: + except OSError as e: eacces = (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)) if sys.platform == 'cygwin': # Cygwin has a bug that causes EPERM to be returned in this @@ -2411,7 +2409,7 @@ def list_running_servers(runtime_dir=None): for file_name in os.listdir(runtime_dir): if re.match('nbserver-(.+).json', file_name): - with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: + with open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: info = json.load(f) # Simple check whether that process is really still running diff --git a/notebook/prometheus/log_functions.py b/notebook/prometheus/log_functions.py index a67a252ade..f82e0f970e 100644 --- a/notebook/prometheus/log_functions.py +++ b/notebook/prometheus/log_functions.py @@ -19,6 +19,6 @@ def prometheus_log_method(handler): """ HTTP_REQUEST_DURATION_SECONDS.labels( method=handler.request.method, - handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__), + handler=f'{handler.__class__.__module__}.{type(handler).__name__}', status_code=handler.get_status() ).observe(handler.request.request_time()) diff --git a/notebook/serverextensions.py b/notebook/serverextensions.py index 72020a24f5..65d98011d9 100644 --- a/notebook/serverextensions.py +++ b/notebook/serverextensions.py @@ -59,14 +59,14 @@ def toggle_serverextension_python(import_name, enabled=None, parent=None, if logger: if new_enabled: - logger.info(u"Enabling: %s" % (import_name)) + logger.info(f"Enabling: {import_name}") else: - logger.info(u"Disabling: %s" % (import_name)) + logger.info(f"Disabling: {import_name}") server_extensions[import_name] = new_enabled if logger: - logger.info(u"- Writing config: {}".format(config_dir)) + logger.info(f"- Writing config: {config_dir}") cm.update("jupyter_notebook_config", cfg) @@ -104,13 +104,13 @@ def validate_serverextension(import_name, logger=None): except Exception: logger.warning("Error loading server extension %s", import_name) - import_msg = u" {} is {} importable?" + import_msg = " {} is {} importable?" 
if func is not None: infos.append(import_msg.format(GREEN_OK, import_name)) else: warnings.append(import_msg.format(RED_X, import_name)) - post_mortem = u" {} {} {}" + post_mortem = " {} {} {}" if logger: if warnings: [logger.info(info) for info in infos] @@ -159,13 +159,13 @@ class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" name = "jupyter serverextension enable/disable" description = "Enable/disable a server extension using frontend configuration files." - + flags = flags user = Bool(True, config=True, help="Whether to do a user install") sys_prefix = Bool(False, config=True, help="Use the sys.prefix as the prefix") python = Bool(False, config=True, help="Install from a Python package") - + def toggle_server_extension(self, import_name): """Change the status of a named server extension. @@ -215,7 +215,7 @@ class EnableServerExtensionApp(ToggleServerExtensionApp): name = "jupyter serverextension enable" description = """ Enable a serverextension in configuration. - + Usage jupyter serverextension enable [--system|--sys-prefix] """ @@ -227,7 +227,7 @@ class DisableServerExtensionApp(ToggleServerExtensionApp): name = "jupyter serverextension disable" description = """ Disable a serverextension in configuration. - + Usage jupyter serverextension disable [--system|--sys-prefix] """ @@ -254,11 +254,9 @@ def list_server_extensions(self): .setdefault("nbserver_extensions", {}) ) if server_extensions: - print(u'config dir: {}'.format(config_dir)) + print(f'config dir: {config_dir}') for import_name, enabled in server_extensions.items(): - print(u' {} {}'.format( - import_name, - GREEN_ENABLED if enabled else RED_DISABLED)) + print(f' {import_name} {GREEN_ENABLED if enabled else RED_DISABLED}') validate_serverextension(import_name, self.log) def start(self): @@ -324,7 +322,7 @@ def _get_server_extension_metadata(module): """ m = import_item(module) if not hasattr(m, '_jupyter_server_extension_paths'): - raise KeyError(u'The Python module {} does not include any valid server extensions'.format(module)) + raise KeyError(f'The Python module {module} does not include any valid server extensions') return m, m._jupyter_server_extension_paths() if __name__ == '__main__': diff --git a/notebook/services/config/handlers.py b/notebook/services/config/handlers.py index aae6480757..245b344bf3 100644 --- a/notebook/services/config/handlers.py +++ b/notebook/services/config/handlers.py @@ -3,9 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json -import os -import io -import errno from tornado import web from ...base.handlers import APIHandler diff --git a/notebook/services/config/tests/test_config_api.py b/notebook/services/config/tests/test_config_api.py index a4df8f40fc..04294c15e6 100644 --- a/notebook/services/config/tests/test_config_api.py +++ b/notebook/services/config/tests/test_config_api.py @@ -8,7 +8,7 @@ from notebook.tests.launchnotebook import NotebookTestBase -class ConfigAPI(object): +class ConfigAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request diff --git a/notebook/services/contents/checkpoints.py b/notebook/services/contents/checkpoints.py index c29a669c22..fd80ef8c0e 100644 --- a/notebook/services/contents/checkpoints.py +++ b/notebook/services/contents/checkpoints.py @@ -53,7 +53,7 @@ def delete_all_checkpoints(self, path): self.delete_checkpoint(checkpoint['id'], path) -class GenericCheckpointsMixin(object): +class GenericCheckpointsMixin: """ Helper for creating Checkpoints subclasses that can be used with any ContentsManager. @@ -90,7 +90,7 @@ def create_checkpoint(self, contents_mgr, path): path, ) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, f'Unexpected type {type}') def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" @@ -100,7 +100,7 @@ def restore_checkpoint(self, contents_mgr, checkpoint_id, path): elif type == 'file': model = self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, f'Unexpected type {type}') contents_mgr.save(model, path) # Required Methods diff --git a/notebook/services/contents/filecheckpoints.py b/notebook/services/contents/filecheckpoints.py index 5a9c835749..540091fd50 100644 --- a/notebook/services/contents/filecheckpoints.py +++ b/notebook/services/contents/filecheckpoints.py @@ -50,7 +50,7 @@ def _root_dir_default(self): # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - checkpoint_id = u'checkpoint' + checkpoint_id = 'checkpoint' src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) @@ -107,11 +107,7 @@ def checkpoint_path(self, checkpoint_id, path): parent, name = ('/' + path).rsplit('/', 1) parent = parent.strip('/') basename, ext = os.path.splitext(name) - filename = u"{name}-{checkpoint_id}{ext}".format( - name=basename, - checkpoint_id=checkpoint_id, - ext=ext, - ) + filename = f"{basename}-{checkpoint_id}{ext}" os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): @@ -133,7 +129,7 @@ def checkpoint_model(self, checkpoint_id, os_path): def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError( 404, - u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) + f'Checkpoint does not exist: {path}@{checkpoint_id}' ) @@ -146,7 +142,7 @@ def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" path = path.strip('/') # only the one checkpoint ID: - checkpoint_id = u"checkpoint" + checkpoint_id = "checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): @@ -159,7 +155,7 @@ def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = 
path.strip('/') # only the one checkpoint ID: - checkpoint_id = u"checkpoint" + checkpoint_id = "checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): diff --git a/notebook/services/contents/fileio.py b/notebook/services/contents/fileio.py index f99504e32b..886ccbebb5 100644 --- a/notebook/services/contents/fileio.py +++ b/notebook/services/contents/fileio.py @@ -7,7 +7,6 @@ from contextlib import contextmanager import errno -import io import os import shutil @@ -102,9 +101,9 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + fileobj = open(path, 'w', encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = open(path, 'wb', **kwargs) try: yield fileobj @@ -154,9 +153,9 @@ def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + fileobj = open(path, 'w', encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = open(path, 'wb', **kwargs) try: yield fileobj @@ -197,7 +196,7 @@ class FileManagerMixin(Configurable): def open(self, os_path, *args, **kwargs): """wrapper around io.open that turns permission errors into 403""" with self.perm_to_403(os_path): - with io.open(os_path, *args, **kwargs) as f: + with open(os_path, *args, **kwargs) as f: yield f @contextmanager @@ -218,7 +217,7 @@ def perm_to_403(self, os_path=''): """context manager for turning permission errors into 403.""" try: yield - except (OSError, IOError) as e: + except OSError as e: if e.errno in {errno.EPERM, errno.EACCES}: # make 403 error message without root prefix # this may not work perfectly on unicode paths on Python 2, @@ -226,7 +225,7 @@ def perm_to_403(self, os_path=''): if not os_path: os_path = str_to_unicode(e.filename or 'unknown file') path = to_api_path(os_path, root=self.root_dir) - raise HTTPError(403, u'Permission denied: %s' % path) from e + raise HTTPError(403, f'Permission denied: {path}') from e else: raise @@ -276,7 +275,7 @@ def _read_notebook(self, os_path, as_version=4): if not self.use_atomic_writing or not os.path.exists(tmp_path): raise HTTPError( 400, - u"Unreadable Notebook: %s %r" % (os_path, e_orig), + f"Unreadable Notebook: {os_path} {e_orig!r}", ) # Move the bad file aside, restore the intermediate, and try again. 
@@ -334,7 +333,7 @@ def _save_file(self, os_path, content, format):
                 bcontent = decodebytes(b64_bytes)
         except Exception as e:
             raise HTTPError(
-                400, u'Encoding error saving %s: %s' % (os_path, e)
+                400, f'Encoding error saving {os_path}: {e}'
             ) from e
 
         with self.atomic_writing(os_path, text=False) as f:
diff --git a/notebook/services/contents/filemanager.py b/notebook/services/contents/filemanager.py
index 0c9386b2fc..a9a9c0014a 100644
--- a/notebook/services/contents/filemanager.py
+++ b/notebook/services/contents/filemanager.py
@@ -5,7 +5,6 @@
 
 from datetime import datetime
 import errno
-import io
 import os
 import shutil
 import stat
@@ -60,7 +59,7 @@ def _post_save_script(model, os_path, contents_manager, **kwargs):
     script, resources = _script_exporter.from_filename(os_path)
     script_fname = base + resources.get('output_extension', '.txt')
     log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
-    with io.open(script_fname, 'w', encoding='utf-8') as f:
+    with open(script_fname, 'w', encoding='utf-8') as f:
         f.write(script)
 
 
@@ -132,8 +131,7 @@ def run_post_save_hook(self, model, os_path):
                 self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
             except Exception as e:
                 self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
-                raise web.HTTPError(500, u'Unexpected error while running post hook save: %s'
-                                    % e) from e
+                raise web.HTTPError(500, f'Unexpected error while running post hook save: {e}') from e
 
     @validate('root_dir')
     def _validate_root_dir(self, proposal):
@@ -243,14 +241,14 @@ def _base_model(self, path):
         """Build the common base of a contents model"""
         os_path = self._get_os_path(path)
         info = os.lstat(os_path)
-        
+
         try:
-            # size of file 
+            # size of file
             size = info.st_size
         except (ValueError, OSError):
             self.log.warning('Unable to get size.')
             size = None
-        
+
         try:
             last_modified = tz.utcfromtimestamp(info.st_mtime)
         except (ValueError, OSError):
@@ -292,7 +290,7 @@ def _dir_model(self, path, content=True):
         """
         os_path = self._get_os_path(path)
 
-        four_o_four = u'directory does not exist: %r' % path
+        four_o_four = 'directory does not exist: %r' % path
 
         if not os.path.isdir(os_path):
             raise web.HTTPError(404, four_o_four)
@@ -392,14 +390,14 @@ def _notebook_model(self, path, content=True):
         model = self._base_model(path)
         model['type'] = 'notebook'
         os_path = self._get_os_path(path)
-        
+
         if content:
             nb = self._read_notebook(os_path, as_version=4)
             self.mark_trusted_cells(nb, path)
             model['content'] = nb
             model['format'] = 'json'
             self.validate_notebook_model(model)
-        
+
         return model
 
     def get(self, path, content=True, type=None, format=None):
@@ -427,32 +425,32 @@ def get(self, path, content=True, type=None, format=None):
         path = path.strip('/')
 
         if not self.exists(path):
-            raise web.HTTPError(404, u'No such file or directory: %s' % path)
+            raise web.HTTPError(404, f'No such file or directory: {path}')
 
         os_path = self._get_os_path(path)
         if os.path.isdir(os_path):
             if type not in (None, 'directory'):
                 raise web.HTTPError(400,
-                                u'%s is a directory, not a %s' % (path, type), reason='bad type')
+                                f'{path} is a directory, not a {type}', reason='bad type')
             model = self._dir_model(path, content=content)
         elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
             model = self._notebook_model(path, content=content)
         else:
             if type == 'directory':
                 raise web.HTTPError(400,
-                                u'%s is not a directory' % path, reason='bad type')
+                                '%s is not a directory' % path, reason='bad type')
             model = self._file_model(path, content=content, format=format)
         return model
def _save_directory(self, os_path, model, path=''): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) + raise web.HTTPError(400, f'Cannot create hidden directory {os_path!r}') if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) elif not os.path.isdir(os_path): - raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) + raise web.HTTPError(400, 'Not a directory: %s' % (os_path)) else: self.log.debug("Directory %r already exists", os_path) @@ -461,9 +459,9 @@ def save(self, model, path=''): path = path.strip('/') if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') + raise web.HTTPError(400, 'No file type provided') if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + raise web.HTTPError(400, 'No file content provided') os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) @@ -488,8 +486,8 @@ def save(self, model, path=''): except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % + self.log.error('Error while saving file: %s %s', path, e, exc_info=True) + raise web.HTTPError(500, 'Unexpected error while saving file: %s %s' % (path, e)) from e validation_message = None @@ -511,7 +509,7 @@ def delete_file(self, path): os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): - raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) + raise web.HTTPError(404, 'File or directory does not exist: %s' % os_path) def is_non_empty_dir(os_path): if os.path.isdir(os_path): @@ -527,7 +525,7 @@ def is_non_empty_dir(os_path): if sys.platform == 'win32' and is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, 'Directory %s not empty' % os_path) try: self.log.debug("Sending %s to trash", os_path) send2trash(os_path) @@ -538,7 +536,7 @@ def is_non_empty_dir(os_path): if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if is_non_empty_dir(os_path): - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, 'Directory %s not empty' % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): shutil.rmtree(os_path) @@ -563,7 +561,7 @@ def rename_file(self, old_path, new_path): # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): - raise web.HTTPError(409, u'File already exists: %s' % new_path) + raise web.HTTPError(409, f'File already exists: {new_path}') # Move the file try: @@ -572,8 +570,7 @@ def rename_file(self, old_path, new_path): except web.HTTPError: raise except Exception as e: - raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % - (old_path, e)) from e + raise web.HTTPError(500, f'Unknown error renaming file: {old_path} {e}') from e def info_string(self): return _("Serving notebooks from local directory: %s") % self.root_dir @@ -604,6 +601,6 @@ def _validate_path(path): for char in invalid_chars: if char in path: - raise web.HTTPError(400, "Path '{}' contains characters that are invalid for the filesystem. 
" - "Path names on this filesystem cannot contain any of the following " - "characters: {}".format(path, invalid_chars)) + raise web.HTTPError(400, f"Path '{path}' contains characters that are invalid for the filesystem. " + f"Path names on this filesystem cannot contain any of the following " + f"characters: {invalid_chars}") diff --git a/notebook/services/contents/handlers.py b/notebook/services/contents/handlers.py index b3216335bb..90aca566d9 100644 --- a/notebook/services/contents/handlers.py +++ b/notebook/services/contents/handlers.py @@ -45,7 +45,7 @@ def validate_model(model, expect_content): if missing: raise web.HTTPError( 500, - u"Missing Model Keys: {missing}".format(missing=missing), + f"Missing Model Keys: {missing}", ) maybe_none_keys = ['content', 'format'] @@ -54,7 +54,7 @@ def validate_model(model, expect_content): if errors: raise web.HTTPError( 500, - u"Keys unexpectedly None: {keys}".format(keys=errors), + f"Keys unexpectedly None: {errors}", ) else: errors = { @@ -65,7 +65,7 @@ def validate_model(model, expect_content): if errors: raise web.HTTPError( 500, - u"Keys unexpectedly not None: {keys}".format(keys=errors), + f"Keys unexpectedly not None: {errors}", ) @@ -103,14 +103,14 @@ def get(self, path=''): path = path or '' type = self.get_query_argument('type', default=None) if type not in {None, 'directory', 'file', 'notebook'}: - raise web.HTTPError(400, u'Type %r is invalid' % type) + raise web.HTTPError(400, f'Type {type!r} is invalid') format = self.get_query_argument('format', default=None) if format not in {None, 'text', 'base64'}: - raise web.HTTPError(400, u'Format %r is invalid' % format) + raise web.HTTPError(400, f'Format {format!r} is invalid') content = self.get_query_argument('content', default='1') if content not in {'0', '1'}: - raise web.HTTPError(400, u'Content %r is invalid' % content) + raise web.HTTPError(400, f'Content {content!r} is invalid') content = int(content) model = yield maybe_future(self.contents_manager.get( @@ -126,7 +126,7 @@ def patch(self, path=''): cm = self.contents_manager model = self.get_json_body() if model is None: - raise web.HTTPError(400, u'JSON body missing') + raise web.HTTPError(400, 'JSON body missing') model = yield maybe_future(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @@ -134,10 +134,7 @@ def patch(self, path=''): @gen.coroutine def _copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" - self.log.info(u"Copying {copy_from} to {copy_to}".format( - copy_from=copy_from, - copy_to=copy_to or '', - )) + self.log.info(f"Copying {copy_from} to {copy_to or ''}") model = yield maybe_future(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) @@ -146,7 +143,7 @@ def _copy(self, copy_from, copy_to=None): @gen.coroutine def _upload(self, model, path): """Handle upload of a new file to path""" - self.log.info(u"Uploading file to %s", path) + self.log.info("Uploading file to %s", path) model = yield maybe_future(self.contents_manager.new(model, path)) self.set_status(201) validate_model(model, expect_content=False) @@ -155,7 +152,7 @@ def _upload(self, model, path): @gen.coroutine def _new_untitled(self, path, type='', ext=''): """Create a new, empty untitled entity""" - self.log.info(u"Creating new %s in %s", type or 'file', path) + self.log.info("Creating new %s in %s", type or 'file', path) model = yield maybe_future(self.contents_manager.new_untitled(path=path, 
type=type, ext=ext)) self.set_status(201) validate_model(model, expect_content=False) @@ -166,7 +163,7 @@ def _save(self, model, path): """Save an existing file.""" chunk = model.get("chunk", None) if not chunk or chunk == -1: # Avoid tedious log information - self.log.info(u"Saving file at %s", path) + self.log.info("Saving file at %s", path) model = yield maybe_future(self.contents_manager.save(model, path)) validate_model(model, expect_content=False) self._finish_model(model) diff --git a/notebook/services/contents/largefilemanager.py b/notebook/services/contents/largefilemanager.py index 6779a0b5c2..38aeeef290 100644 --- a/notebook/services/contents/largefilemanager.py +++ b/notebook/services/contents/largefilemanager.py @@ -1,9 +1,7 @@ from notebook.services.contents.filemanager import FileContentsManager -from contextlib import contextmanager from tornado import web -import nbformat import base64 -import os, io +import os class LargeFileManager(FileContentsManager): """Handle large file upload.""" @@ -13,13 +11,13 @@ def save(self, model, path=''): chunk = model.get('chunk', None) if chunk is not None: path = path.strip('/') - + if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') + raise web.HTTPError(400, 'No file type provided') if model['type'] != 'file': - raise web.HTTPError(400, u'File type "{}" is not supported for large file transfer'.format(model['type'])) + raise web.HTTPError(400, f'File type "{model["type"]}" is not supported for large file transfer') if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + raise web.HTTPError(400, 'No file content provided') os_path = self._get_os_path(path) @@ -33,8 +31,8 @@ def save(self, model, path=''): except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) from e + self.log.error('Error while saving file: %s %s', path, e, exc_info=True) + raise web.HTTPError(500, f'Unexpected error while saving file: {path} {e}') from e model = self.get(path, content=False) @@ -60,11 +58,11 @@ def _save_large_file(self, os_path, content, format): bcontent = base64.b64decode(b64_bytes) except Exception as e: raise web.HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) + 400, f'Encoding error saving {os_path}: {e}' ) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) - with io.open(os_path, 'ab') as f: + with open(os_path, 'ab') as f: f.write(bcontent) diff --git a/notebook/services/contents/manager.py b/notebook/services/contents/manager.py index b556abc1d3..65b4aa449f 100644 --- a/notebook/services/contents/manager.py +++ b/notebook/services/contents/manager.py @@ -55,7 +55,7 @@ class ContentsManager(LoggingConfigurable): indicating the root path. """ - + root_dir = Unicode('/', config=True) allow_hidden = Bool(False, config=True, help="Allow access to hidden files") @@ -65,7 +65,7 @@ def _notary_default(self): return sign.NotebookNotary(parent=self) hide_globs = List(Unicode(), [ - u'__pycache__', '*.pyc', '*.pyo', + '__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~', ], config=True, help=""" Glob patterns to hide in file and directory listings. 
@@ -302,7 +302,7 @@ def info_string(self): def get_kernel_path(self, path, model=None): """Return the API path for the kernel - + KernelManagers can turn this value into a filesystem path, or ignore it altogether. @@ -334,17 +334,16 @@ def increment_filename(self, filename, path='', insert=''): basename, dot, ext = filename.rpartition('.') if ext != 'ipynb': basename, dot, ext = filename.partition('.') - + suffix = dot + ext for i in itertools.count(): if i: - insert_i = '{}{}'.format(insert, i) + insert_i = f'{insert}{i}' else: insert_i = '' - name = u'{basename}{insert}{suffix}'.format(basename=basename, - insert=insert_i, suffix=suffix) - if not self.exists(u'{}/{}'.format(path, name)): + name = f'{basename}{insert_i}{suffix}' + if not self.exists(f'{path}/{name}'): break return name @@ -353,33 +352,32 @@ def validate_notebook_model(self, model): try: validate_nb(model['content']) except ValidationError as e: - model['message'] = u'Notebook validation failed: {}:\n{}'.format( - e.message, json.dumps(e.instance, indent=1, default=lambda obj: ''), - ) + model['message'] = f'Notebook validation failed: {e.message}:\n' \ + f'{json.dumps(e.instance, indent=1, default=lambda obj: "")}' return model - + def new_untitled(self, path='', type='', ext=''): """Create a new untitled file or directory in path - + path must be a directory - + File extension can be specified. - + Use `new` to create files with a fully specified path (including filename). """ path = path.strip('/') if not self.dir_exists(path): raise HTTPError(404, 'No such directory: %s' % path) - + model = {} if type: model['type'] = type - + if ext == '.ipynb': model.setdefault('type', 'notebook') else: model.setdefault('type', 'file') - + insert = '' if model['type'] == 'directory': untitled = self.untitled_directory @@ -391,25 +389,25 @@ def new_untitled(self, path='', type='', ext=''): untitled = self.untitled_file else: raise HTTPError(400, "Unexpected model type: %r" % model['type']) - + name = self.increment_filename(untitled + ext, path, insert=insert) - path = u'{0}/{1}'.format(path, name) + path = f'{path}/{name}' return self.new(model, path) - + def new(self, model=None, path=''): """Create a new file or directory and return its model with no content. - + To create a new untitled entity in a directory, use `new_untitled`. """ path = path.strip('/') if model is None: model = {} - + if path.endswith('.ipynb'): model.setdefault('type', 'notebook') else: model.setdefault('type', 'file') - + # no content, not a directory, so fill out new-file model if 'content' not in model and model['type'] != 'directory': if model['type'] == 'notebook': @@ -419,7 +417,7 @@ def new(self, model=None, path=''): model['content'] = '' model['type'] = 'file' model['format'] = 'text' - + model = self.save(model, path) return model @@ -429,7 +427,7 @@ def copy(self, from_path, to_path=None): If to_path not specified, it will be the parent directory of from_path. If to_path is a directory, filename will increment `from_path-Copy#.ext`. Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. - For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. + For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. from_path must be a full path to a file. 
""" @@ -442,20 +440,20 @@ def copy(self, from_path, to_path=None): else: from_dir = '' from_name = path - + model = self.get(path) model.pop('path', None) model.pop('name', None) if model['type'] == 'directory': raise HTTPError(400, "Can't copy directories") - + if to_path is None: to_path = from_dir if self.dir_exists(to_path): - name = copy_pat.sub(u'.', from_name) + name = copy_pat.sub('.', from_name) to_name = self.increment_filename(name, to_path, insert='-Copy') - to_path = u'{0}/{1}'.format(to_path, to_name) - + to_path = f'{to_path}/{to_name}' + model = self.save(model, to_path) return model diff --git a/notebook/services/contents/tests/test_contents_api.py b/notebook/services/contents/tests/test_contents_api.py index 6e4ad49dbc..bbab19e14c 100644 --- a/notebook/services/contents/tests/test_contents_api.py +++ b/notebook/services/contents/tests/test_contents_api.py @@ -2,7 +2,6 @@ from contextlib import contextmanager from functools import partial -import io import json import os import shutil @@ -28,10 +27,7 @@ from ipython_genutils import py3compat from ipython_genutils.tempdir import TemporaryDirectory -try: #PY3 - from base64 import encodebytes, decodebytes -except ImportError: #PY2 - from base64 import encodestring as encodebytes, decodestring as decodebytes +from base64 import encodebytes, decodebytes def uniq_stable(elems): @@ -50,7 +46,7 @@ def dirs_only(dir_model): return [x for x in dir_model['content'] if x['type']=='directory'] -class API(object): +class API: """Wrapper for contents API calls.""" def __init__(self, request): self.request = request @@ -128,16 +124,16 @@ class APITest(NotebookTestBase): """Test the kernels web service API""" dirs_nbs = [('', 'inroot'), ('Directory with spaces in', 'inspace'), - (u'unicodé', 'innonascii'), + ('unicodé', 'innonascii'), ('foo', 'a'), ('foo', 'b'), ('foo', 'name with spaces'), - ('foo', u'unicodé'), + ('foo', 'unicodé'), ('foo/bar', 'baz'), ('ordering', 'A'), ('ordering', 'b'), ('ordering', 'C'), - (u'å b', u'ç d'), + ('å b', 'ç d'), ] hidden_dirs = ['.hidden', '__pycache__'] @@ -151,11 +147,11 @@ def _blob_for_name(name): @staticmethod def _txt_for_name(name): - return u'%s text file' % name - + return f'{name} text file' + def to_os_path(self, api_path): return to_os_path(api_path, root=self.notebook_dir) - + def make_dir(self, api_path): """Create a directory at api_path""" os_path = self.to_os_path(api_path) @@ -167,20 +163,20 @@ def make_dir(self, api_path): def make_txt(self, api_path, txt): """Make a text file at a given api_path""" os_path = self.to_os_path(api_path) - with io.open(os_path, 'w', encoding='utf-8') as f: + with open(os_path, 'w', encoding='utf-8') as f: f.write(txt) - + def make_blob(self, api_path, blob): """Make a binary file at a given api_path""" os_path = self.to_os_path(api_path) - with io.open(os_path, 'wb') as f: + with open(os_path, 'wb') as f: f.write(blob) - + def make_nb(self, api_path, nb): """Make a notebook file at a given api_path""" os_path = self.to_os_path(api_path) - - with io.open(os_path, 'w', encoding='utf-8') as f: + + with open(os_path, 'w', encoding='utf-8') as f: write(nb, f, version=4) def delete_dir(self, api_path): @@ -192,10 +188,10 @@ def delete_file(self, api_path): """Delete a file at the given path if it exists.""" if self.isfile(api_path): os.unlink(self.to_os_path(api_path)) - + def isfile(self, api_path): return os.path.isfile(self.to_os_path(api_path)) - + def isdir(self, api_path): return os.path.isdir(self.to_os_path(api_path)) @@ -215,18 +211,18 @@ def 
setUp(self): for d, name in self.dirs_nbs: # create a notebook nb = new_notebook() - nbname = u'{}/{}.ipynb'.format(d, name) + nbname = f'{d}/{name}.ipynb' self.make_nb(nbname, nb) self.addCleanup(partial(self.delete_file, nbname)) # create a text file txt = self._txt_for_name(name) - txtname = u'{}/{}.txt'.format(d, name) + txtname = f'{d}/{name}.txt' self.make_txt(txtname, txt) self.addCleanup(partial(self.delete_file, txtname)) blob = self._blob_for_name(name) - blobname = u'{}/{}.blob'.format(d, name) + blobname = f'{d}/{name}.blob' self.make_blob(blobname, blob) self.addCleanup(partial(self.delete_file, blobname)) @@ -241,10 +237,10 @@ def test_list_notebooks(self): self.assertEqual(len(nbs), 1) self.assertEqual(nbs[0]['name'], 'inspace.ipynb') - nbs = notebooks_only(self.api.list(u'/unicodé/').json()) + nbs = notebooks_only(self.api.list('/unicodé/').json()) self.assertEqual(len(nbs), 1) self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') - self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb') + self.assertEqual(nbs[0]['path'], 'unicodé/innonascii.ipynb') nbs = notebooks_only(self.api.list('/foo/bar/').json()) self.assertEqual(len(nbs), 1) @@ -254,7 +250,7 @@ def test_list_notebooks(self): nbs = notebooks_only(self.api.list('foo').json()) self.assertEqual(len(nbs), 4) nbnames = { normalize('NFC', n['name']) for n in nbs } - expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'] + expected = [ 'a.ipynb', 'b.ipynb', 'name with spaces.ipynb', 'unicodé.ipynb'] expected = { normalize('NFC', name) for name in expected } self.assertEqual(nbnames, expected) @@ -284,7 +280,7 @@ def test_get_nb_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.ipynb') nb = self.api.read(path).json() - self.assertEqual(nb['name'], u'%s.ipynb' % name) + self.assertEqual(nb['name'], '%s.ipynb' % name) self.assertEqual(nb['path'], path) self.assertEqual(nb['type'], 'notebook') self.assertIn('content', nb) @@ -296,7 +292,7 @@ def test_get_nb_no_content(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.ipynb') nb = self.api.read(path, content=False).json() - self.assertEqual(nb['name'], u'%s.ipynb' % name) + self.assertEqual(nb['name'], f'{name}.ipynb') self.assertEqual(nb['path'], path) self.assertEqual(nb['type'], 'notebook') self.assertIn('content', nb) @@ -311,7 +307,7 @@ def test_get_nb_invalid(self): 'metadata': {}, }], } - path = u'å b/Validate tést.ipynb' + path = 'å b/Validate tést.ipynb' self.make_txt(path, py3compat.cast_unicode(json.dumps(nb))) model = self.api.read(path).json() self.assertEqual(model['path'], path) @@ -329,7 +325,7 @@ def test_get_text_file_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.txt') model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.txt' % name) + self.assertEqual(model['name'], f'{name}.txt') self.assertEqual(model['path'], path) self.assertIn('content', model) self.assertEqual(model['format'], 'text') @@ -348,7 +344,7 @@ def test_get_binary_file_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.blob') model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.blob' % name) + self.assertEqual(model['name'], f'{name}.blob') self.assertEqual(model['path'], path) self.assertIn('content', model) self.assertEqual(model['format'], 'base64') @@ -364,15 +360,15 @@ def test_get_binary_file_contents(self): def test_get_bad_type(self): with assert_http_error(400): - self.api.read(u'unicodé', type='file') # 
this is a directory + self.api.read('unicodé', type='file') # this is a directory with assert_http_error(400): - self.api.read(u'unicodé/innonascii.ipynb', type='directory') + self.api.read('unicodé/innonascii.ipynb', type='directory') def _check_created(self, resp, path, type='notebook'): self.assertEqual(resp.status_code, 201) location_header = py3compat.str_to_unicode(resp.headers['Location']) - self.assertEqual(location_header, url_path_join(self.url_prefix, u'api/contents', url_escape(path))) + self.assertEqual(location_header, url_path_join(self.url_prefix, 'api/contents', url_escape(path))) rjson = resp.json() self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) self.assertEqual(rjson['path'], path) @@ -381,12 +377,12 @@ def _check_created(self, resp, path, type='notebook'): assert isright(path) def test_create_untitled(self): - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled.ipynb') + resp = self.api.create_untitled(path='å b') + self._check_created(resp, 'å b/Untitled.ipynb') # Second time - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled1.ipynb') + resp = self.api.create_untitled(path='å b') + self._check_created(resp, 'å b/Untitled1.ipynb') # And two directories down resp = self.api.create_untitled(path='foo/bar') @@ -405,39 +401,39 @@ def test_create_untitled_txt(self): def test_upload(self): nb = new_notebook() nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' + path = 'å b/Upload tést.ipynb' resp = self.api.upload(path, body=json.dumps(nbmodel)) self._check_created(resp, path) def test_mkdir_untitled(self): - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder', type='directory') + resp = self.api.mkdir_untitled(path='å b') + self._check_created(resp, 'å b/Untitled Folder', type='directory') # Second time - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder 1', type='directory') + resp = self.api.mkdir_untitled(path='å b') + self._check_created(resp, 'å b/Untitled Folder 1', type='directory') # And two directories down resp = self.api.mkdir_untitled(path='foo/bar') self._check_created(resp, 'foo/bar/Untitled Folder', type='directory') def test_mkdir(self): - path = u'å b/New ∂ir' + path = 'å b/New ∂ir' resp = self.api.mkdir(path) self._check_created(resp, path, type='directory') def test_mkdir_hidden_400(self): with assert_http_error(400): - resp = self.api.mkdir(u'å b/.hidden') + resp = self.api.mkdir('å b/.hidden') def test_upload_txt(self): - body = u'ünicode téxt' + body = 'ünicode téxt' model = { 'content' : body, 'format' : 'text', 'type' : 'file', } - path = u'å b/Upload tést.txt' + path = 'å b/Upload tést.txt' resp = self.api.upload(path, body=json.dumps(model)) # check roundtrip @@ -455,7 +451,7 @@ def test_upload_b64(self): 'format' : 'base64', 'type' : 'file', } - path = u'å b/Upload tést.blob' + path = 'å b/Upload tést.blob' resp = self.api.upload(path, body=json.dumps(model)) # check roundtrip @@ -473,7 +469,7 @@ def test_upload_v2(self): nb.worksheets.append(ws) ws.cells.append(v2.new_code_cell(input='print("hi")')) nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' + path = 'å b/Upload tést.ipynb' resp = self.api.upload(path, body=json.dumps(nbmodel)) self._check_created(resp, path) resp = self.api.read(path) @@ -481,34 +477,34 @@ def test_upload_v2(self): self.assertEqual(data['content']['nbformat'], 4) def test_copy(self): - resp = 
self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy1.ipynb') + + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy2.ipynb') + def test_copy_copy(self): - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy1.ipynb') + + resp = self.api.copy('å b/ç d-Copy1.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy2.ipynb') + def test_copy_path(self): - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a.ipynb') - - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a-Copy1.ipynb') + resp = self.api.copy('foo/a.ipynb', 'å b') + self._check_created(resp, 'å b/a.ipynb') + + resp = self.api.copy('foo/a.ipynb', 'å b') + self._check_created(resp, 'å b/a-Copy1.ipynb') def test_copy_put_400(self): with assert_http_error(400): - resp = self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb') + resp = self.api.copy_put('å b/ç d.ipynb', 'å b/cøpy.ipynb') def test_copy_dir_400(self): # can't copy directories with assert_http_error(400): - resp = self.api.copy(u'å b', u'foo') + resp = self.api.copy('å b', 'foo') def test_delete(self): for d, name in self.dirs_nbs: @@ -537,15 +533,15 @@ def test_delete_non_empty_dir(self): self.skipTest("Disabled deleting non-empty dirs on Windows") # Test that non empty directory can be deleted try: - self.api.delete(u'å b') + self.api.delete('å b') except requests.HTTPError as e: if e.response.status_code == 400: - if not self.can_send2trash(u'å b'): + if not self.can_send2trash('å b'): self.skipTest("Dir can't be sent to trash") raise # Check if directory has actually been deleted with assert_http_error(404): - self.api.list(u'å b') + self.api.list('å b') def test_rename(self): resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb') @@ -555,7 +551,7 @@ def test_rename(self): assert self.isfile('foo/z.ipynb') nbs = notebooks_only(self.api.list('foo').json()) - nbnames = set(n['name'] for n in nbs) + nbnames = {n['name'] for n in nbs} self.assertIn('z.ipynb', nbnames) self.assertNotIn('a.ipynb', nbnames) @@ -599,7 +595,7 @@ def test_save(self): resp = self.api.read('foo/a.ipynb') nbcontent = json.loads(resp.text)['content'] nb = from_dict(nbcontent) - nb.cells.append(new_markdown_cell(u'Created by test ³')) + nb.cells.append(new_markdown_cell('Created by test ³')) nbmodel = {'content': nb, 'type': 'notebook'} resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) @@ -607,7 +603,7 @@ def test_save(self): nbcontent = self.api.read('foo/a.ipynb').json()['content'] newnb = from_dict(nbcontent) self.assertEqual(newnb.cells[0].source, - u'Created by test ³') + 'Created by test ³') def test_checkpoints(self): resp = self.api.read('foo/a.ipynb') diff --git a/notebook/services/contents/tests/test_fileio.py b/notebook/services/contents/tests/test_fileio.py index adc06d97f1..281be6a3d6 100644 --- a/notebook/services/contents/tests/test_fileio.py +++ b/notebook/services/contents/tests/test_fileio.py @@ -3,7 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-import io as stdlib_io import os.path import unittest import pytest @@ -17,13 +16,14 @@ umask = 0 def test_atomic_writing(): - class CustomExc(Exception): pass + class CustomExc(Exception): + pass with TemporaryDirectory() as td: f1 = os.path.join(td, 'penguin') - with stdlib_io.open(f1, 'w') as f: - f.write(u'Before') - + with open(f1, 'w') as f: + f.write('Before') + if os.name != 'nt': os.chmod(f1, 0o701) orig_mode = stat.S_IMODE(os.stat(f1).st_mode) @@ -40,18 +40,18 @@ class CustomExc(Exception): pass with pytest.raises(CustomExc): with atomic_writing(f1) as f: - f.write(u'Failing write') + f.write('Failing write') raise CustomExc # Because of the exception, the file should not have been modified - with stdlib_io.open(f1, 'r') as f: - assert f.read() == u'Before' + with open(f1) as f: + assert f.read() == 'Before' with atomic_writing(f1) as f: - f.write(u'Overwritten') + f.write('Overwritten') - with stdlib_io.open(f1, 'r') as f: - assert f.read() == u'Overwritten' + with open(f1) as f: + assert f.read() == 'Overwritten' if os.name != 'nt': mode = stat.S_IMODE(os.stat(f1).st_mode) @@ -60,10 +60,10 @@ class CustomExc(Exception): pass if have_symlink: # Check that writing over a file preserves a symlink with atomic_writing(f2) as f: - f.write(u'written from symlink') - - with stdlib_io.open(f1, 'r') as f: - assert f.read() == u'written from symlink' + f.write('written from symlink') + + with open(f1) as f: + assert f.read() == 'written from symlink' class TestWithSetUmask(unittest.TestCase): def setUp(self): @@ -71,7 +71,7 @@ def setUp(self): global umask umask = os.umask(0) os.umask(umask) - + def tearDown(self): # restore umask os.umask(umask) @@ -82,14 +82,14 @@ def test_atomic_writing_umask(self): os.umask(0o022) f1 = os.path.join(td, '1') with atomic_writing(f1) as f: - f.write(u'1') + f.write('1') mode = stat.S_IMODE(os.stat(f1).st_mode) assert mode == 0o644 - + os.umask(0o057) f2 = os.path.join(td, '2') with atomic_writing(f2) as f: - f.write(u'2') + f.write('2') mode = stat.S_IMODE(os.stat(f2).st_mode) assert mode == 0o620 @@ -97,36 +97,36 @@ def test_atomic_writing_umask(self): def test_atomic_writing_newlines(): with TemporaryDirectory() as td: path = os.path.join(td, 'testfile') - - lf = u'a\nb\nc\n' - plat = lf.replace(u'\n', os.linesep) - crlf = lf.replace(u'\n', u'\r\n') - + + lf = 'a\nb\nc\n' + plat = lf.replace('\n', os.linesep) + crlf = lf.replace('\n', '\r\n') + # test default - with stdlib_io.open(path, 'w') as f: + with open(path, 'w') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == plat - + # test newline=LF - with stdlib_io.open(path, 'w', newline='\n') as f: + with open(path, 'w', newline='\n') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == lf - + # test newline=CRLF with atomic_writing(path, newline='\r\n') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == crlf - + # test newline=no convert - text = u'crlf\r\ncr\rlf\n' + text = 'crlf\r\ncr\rlf\n' with atomic_writing(path, newline='') as f: f.write(text) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == text diff --git a/notebook/services/contents/tests/test_largefilemanager.py b/notebook/services/contents/tests/test_largefilemanager.py index 13d294b9b0..4f52b2453d 100644 --- 
a/notebook/services/contents/tests/test_largefilemanager.py +++ b/notebook/services/contents/tests/test_largefilemanager.py @@ -68,7 +68,7 @@ def test_save(self): try: model = {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file', - 'content': u'test', 'format': 'json'} + 'content': 'test', 'format': 'json'} cm.save(model, model['path']) except web.HTTPError as e: self.assertEqual("HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')", @@ -76,7 +76,7 @@ def test_save(self): # Save model for different chunks model = {'name': 'test', 'path': 'test', 'type': 'file', - 'content': u'test==', 'format': 'text'} + 'content': 'test==', 'format': 'text'} name = model['name'] path = model['path'] cm.save(model, path) diff --git a/notebook/services/contents/tests/test_manager.py b/notebook/services/contents/tests/test_manager.py index dfe5d272f5..340ffefd58 100644 --- a/notebook/services/contents/tests/test_manager.py +++ b/notebook/services/contents/tests/test_manager.py @@ -87,7 +87,7 @@ def test_get_os_path(self): self.assertEqual(path, fs_path) def test_checkpoint_subdir(self): - subd = u'sub ∂ir' + subd = 'sub ∂ir' cp_name = 'test-cp.ipynb' with TemporaryDirectory() as td: root = td @@ -149,7 +149,7 @@ def test_good_symlink(self): cm = FileContentsManager(root_dir=td) parent = 'test good symlink' name = 'good symlink' - path = '{0}/{1}'.format(parent, name) + path = f'{parent}/{name}' _make_dir(cm, parent) file_model = cm.new(path=parent + '/zfoo.txt') @@ -175,7 +175,7 @@ def test_403(self): os.chmod(os_path, 0o400) try: with cm.open(os_path, 'w') as f: - f.write(u"don't care") + f.write("don't care") except HTTPError as e: self.assertEqual(e.status_code, 403) else: @@ -201,7 +201,7 @@ def test_escape_root(self): with self.assertRaisesHTTPError(404): cm.save(model={ 'type': 'file', - 'content': u'', + 'content': '', 'format': 'text', }, path='../foo') @@ -378,19 +378,19 @@ def test_get(self): self.assertIn('path', model2) self.assertIn('content', model2) self.assertEqual(model2['name'], 'Untitled.ipynb') - self.assertEqual(model2['path'], '{0}/{1}'.format(sub_dir.strip('/'), name)) + self.assertEqual(model2['path'], f'{sub_dir.strip("/")}/{name}') # Test with a regular file. file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path'] file_model = cm.get(file_model_path) self.assertDictContainsSubset( { - 'content': u'', - 'format': u'text', - 'mimetype': u'text/plain', - 'name': u'untitled.txt', - 'path': u'foo/untitled.txt', - 'type': u'file', + 'content': '', + 'format': 'text', + 'mimetype': 'text/plain', + 'name': 'untitled.txt', + 'path': 'foo/untitled.txt', + 'type': 'file', 'writable': True, }, file_model, @@ -413,7 +413,7 @@ def test_get(self): # Directory contents should match the contents of each individual entry # when requested with content=False. 
model2_no_content = cm.get(sub_dir + name, content=False) - file_model_no_content = cm.get(u'foo/untitled.txt', content=False) + file_model_no_content = cm.get('foo/untitled.txt', content=False) sub_sub_dir_no_content = cm.get('foo/bar', content=False) self.assertEqual(sub_sub_dir_no_content['path'], 'foo/bar') self.assertEqual(sub_sub_dir_no_content['name'], 'bar') @@ -582,9 +582,9 @@ def test_delete_root(self): def test_copy(self): cm = self.contents_manager - parent = u'å b' - name = u'nb √.ipynb' - path = u'{0}/{1}'.format(parent, name) + parent = 'å b' + name = 'nb √.ipynb' + path = f'{parent}/{name}' self.make_dir(parent) orig = cm.new(path=path) @@ -593,11 +593,11 @@ def test_copy(self): self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy1.ipynb')) # copy with specified name - copy2 = cm.copy(path, u'å b/copy 2.ipynb') - self.assertEqual(copy2['name'], u'copy 2.ipynb') - self.assertEqual(copy2['path'], u'å b/copy 2.ipynb') + copy2 = cm.copy(path, 'å b/copy 2.ipynb') + self.assertEqual(copy2['name'], 'copy 2.ipynb') + self.assertEqual(copy2['path'], 'å b/copy 2.ipynb') # copy with specified path - copy2 = cm.copy(path, u'/') + copy2 = cm.copy(path, '/') self.assertEqual(copy2['name'], name) self.assertEqual(copy2['path'], name) diff --git a/notebook/services/kernels/handlers.py b/notebook/services/kernels/handlers.py index 2b7f89b6f8..8e0a181011 100644 --- a/notebook/services/kernels/handlers.py +++ b/notebook/services/kernels/handlers.py @@ -305,7 +305,7 @@ def _finish_kernel_info(self, info): protocol_version = info.get('protocol_version', client_protocol_version) if protocol_version != client_protocol_version: self.session.adapt_version = int(protocol_version.split('.')[0]) - self.log.info("Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format(protocol_version=protocol_version, kernel_id=self.kernel_id, client_protocol_version=client_protocol_version)) + self.log.info(f"Adapting from protocol version {protocol_version} (kernel {self.kernel_id}) to {client_protocol_version} (client).") if not self._kernel_info_future.done(): self._kernel_info_future.set_result(info) @@ -485,7 +485,7 @@ def write_stderr(error_message): # Increment the bytes and message count self._iopub_window_msg_count += 1 if msg_type == 'stream': - byte_count = sum([len(x) for x in msg_list]) + byte_count = sum(len(x) for x in msg_list) else: byte_count = 0 self._iopub_window_byte_count += byte_count diff --git a/notebook/services/kernels/kernelmanager.py b/notebook/services/kernels/kernelmanager.py index 7ed182dfc0..aa93bc149b 100644 --- a/notebook/services/kernels/kernelmanager.py +++ b/notebook/services/kernels/kernelmanager.py @@ -33,7 +33,7 @@ try: from jupyter_client.multikernelmanager import AsyncMultiKernelManager except ImportError: - class AsyncMultiKernelManager(object): + class AsyncMultiKernelManager: """Empty class to satisfy unused reference by AsyncMappingKernelManager.""" def __init__(self, **kwargs): pass @@ -391,7 +391,7 @@ def list_kernels(self): def _check_kernel_id(self, kernel_id): """Check a that a kernel_id exists and raise 404 if not.""" if kernel_id not in self: - raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id) + raise web.HTTPError(404, f'Kernel does not exist: {kernel_id}') # monitoring activity: diff --git a/notebook/services/kernels/tests/test_kernels_api.py b/notebook/services/kernels/tests/test_kernels_api.py index 7895f36a81..f11963c7b2 100644 --- 
a/notebook/services/kernels/tests/test_kernels_api.py +++ b/notebook/services/kernels/tests/test_kernels_api.py @@ -24,7 +24,7 @@ async_testing_enabled = False -class KernelAPI(object): +class KernelAPI: """Wrapper for kernel REST API requests""" def __init__(self, request, base_url, headers): self.request = request @@ -204,11 +204,11 @@ class AsyncKernelAPITest(KernelAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncKernelAPITest tests skipped due to down-level jupyter_client!") - super(AsyncKernelAPITest, cls).setup_class() + super().setup_class() @classmethod def get_argv(cls): - argv = super(AsyncKernelAPITest, cls).get_argv() + argv = super().get_argv() # before we extend the argv with the class, ensure that appropriate jupyter_client is available. # if not available, don't set kernel_manager_class, resulting in the repeat of sync-based tests. @@ -244,11 +244,11 @@ class KernelCullingTest(NotebookTestBase): @classmethod def get_argv(cls): - argv = super(KernelCullingTest, cls).get_argv() + argv = super().get_argv() # Enable culling with 5s timeout and 1s intervals - argv.extend(['--MappingKernelManager.cull_idle_timeout={}'.format(CULL_TIMEOUT), - '--MappingKernelManager.cull_interval={}'.format(CULL_INTERVAL), + argv.extend([f'--MappingKernelManager.cull_idle_timeout={CULL_TIMEOUT}', + f'--MappingKernelManager.cull_interval={CULL_INTERVAL}', '--MappingKernelManager.cull_connected=False']) return argv diff --git a/notebook/services/kernelspecs/handlers.py b/notebook/services/kernelspecs/handlers.py index a01d307fb2..639e916e2d 100644 --- a/notebook/services/kernelspecs/handlers.py +++ b/notebook/services/kernelspecs/handlers.py @@ -87,7 +87,7 @@ def get(self, kernel_name): try: spec = yield maybe_future(ksm.get_kernel_spec(kernel_name)) except KeyError as e: - raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name) from e + raise web.HTTPError(404, f'Kernel spec {kernel_name} not found') from e if is_kernelspec_model(spec): model = spec else: diff --git a/notebook/services/kernelspecs/tests/test_kernelspecs_api.py b/notebook/services/kernelspecs/tests/test_kernelspecs_api.py index 215bfc861b..e60979c751 100644 --- a/notebook/services/kernelspecs/tests/test_kernelspecs_api.py +++ b/notebook/services/kernelspecs/tests/test_kernelspecs_api.py @@ -1,15 +1,12 @@ """Test the kernel specs webservice API.""" import errno -import io import json import os import shutil pjoin = os.path.join -import requests - from jupyter_client.kernelspec import NATIVE_KERNEL_NAME from notebook.utils import url_path_join, url_escape from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error @@ -20,10 +17,10 @@ 'display_name':'Test kernel', } -some_resource = u"The very model of a modern major general" +some_resource = "The very model of a modern major general" -class KernelSpecAPI(object): +class KernelSpecAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request @@ -41,7 +38,7 @@ def list(self): def kernel_spec_info(self, name): return self._req('GET', url_path_join('api/kernelspecs', name)) - + def kernel_resource(self, name, path): return self._req('GET', url_path_join('kernelspecs', name, path)) @@ -60,11 +57,11 @@ def create_spec(self, name): except OSError as e: if e.errno != errno.EEXIST: raise - + with open(pjoin(sample_kernel_dir, 'kernel.json'), 'w') as f: json.dump(sample_kernel_json, f) - - with io.open(pjoin(sample_kernel_dir, 'resource.txt'), 
'w', + + with open(pjoin(sample_kernel_dir, 'resource.txt'), 'w', encoding='utf-8') as f: f.write(some_resource) @@ -76,10 +73,10 @@ def test_list_kernelspecs_bad(self): except OSError as e: if e.errno != errno.EEXIST: raise - + with open(pjoin(bad_kernel_dir, 'kernel.json'), 'w') as f: f.write("garbage") - + model = self.ks_api.list().json() assert isinstance(model, dict) self.assertEqual(model['default'], NATIVE_KERNEL_NAME) @@ -87,9 +84,9 @@ def test_list_kernelspecs_bad(self): assert isinstance(specs, dict) # 2: the sample kernelspec created in setUp, and the native Python kernel self.assertGreaterEqual(len(specs), 2) - + shutil.rmtree(bad_kernel_dir) - + def test_list_kernelspecs(self): model = self.ks_api.list().json() assert isinstance(model, dict) @@ -123,14 +120,14 @@ def test_get_kernelspec_spaces(self): def test_get_nonexistant_kernelspec(self): with assert_http_error(404): self.ks_api.kernel_spec_info('nonexistant') - + def test_get_kernel_resource_file(self): res = self.ks_api.kernel_resource('sAmple', 'resource.txt') self.assertEqual(res.text, some_resource) - + def test_get_nonexistant_resource(self): with assert_http_error(404): self.ks_api.kernel_resource('nonexistant', 'resource.txt') - + with assert_http_error(404): self.ks_api.kernel_resource('sample', 'nonexistant.txt') diff --git a/notebook/services/nbconvert/tests/test_nbconvert_api.py b/notebook/services/nbconvert/tests/test_nbconvert_api.py index d6ef9d2ca5..2878dc0920 100644 --- a/notebook/services/nbconvert/tests/test_nbconvert_api.py +++ b/notebook/services/nbconvert/tests/test_nbconvert_api.py @@ -3,7 +3,7 @@ from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase -class NbconvertAPI(object): +class NbconvertAPI: """Wrapper for nbconvert API calls.""" def __init__(self, request): self.request = request diff --git a/notebook/services/sessions/sessionmanager.py b/notebook/services/sessions/sessionmanager.py index 5686332868..4294580ede 100644 --- a/notebook/services/sessions/sessionmanager.py +++ b/notebook/services/sessions/sessionmanager.py @@ -187,12 +187,12 @@ def get_session(self, **kwargs): for key, value in kwargs.items(): q.append("%s=%r" % (key, value)) - raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q))) + raise web.HTTPError(404, f'Session not found: {", ".join(q)}') try: model = yield maybe_future(self.row_to_model(row)) except KeyError as e: - raise web.HTTPError(404, u'Session not found: %s' % str(e)) + raise web.HTTPError(404, f'Session not found: {e}') raise gen.Return(model) @gen.coroutine diff --git a/notebook/services/sessions/tests/test_sessionmanager.py b/notebook/services/sessions/tests/test_sessionmanager.py index 9af38033c8..b58f8f51d5 100644 --- a/notebook/services/sessions/tests/test_sessionmanager.py +++ b/notebook/services/sessions/tests/test_sessionmanager.py @@ -11,7 +11,7 @@ from notebook.services.contents.manager import ContentsManager from notebook._tz import utcnow, isoformat -class DummyKernel(object): +class DummyKernel: def __init__(self, kernel_name='python'): self.kernel_name = kernel_name @@ -22,11 +22,11 @@ class DummyMKM(MappingKernelManager): """MappingKernelManager interface that doesn't start kernels, for testing""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.id_letters = iter(u'ABCDEFGHIJK') + self.id_letters = iter('ABCDEFGHIJK') def _new_id(self): return next(self.id_letters) - + def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs): kernel_id = 
kernel_id or self._new_id() k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) @@ -40,7 +40,7 @@ def shutdown_kernel(self, kernel_id, now=False): class TestSessionManager(TestCase): - + def setUp(self): self.sm = SessionManager( kernel_manager=DummyMKM(), @@ -59,7 +59,7 @@ def co_add(): sessions.append(session) raise gen.Return(sessions) return self.loop.run_sync(co_add) - + def create_session(self, **kwargs): return self.create_sessions(kwargs)[0] @@ -68,8 +68,8 @@ def test_get_session(self): session_id = self.create_session(path='/path/to/test.ipynb', kernel_name='bar')['id'] model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) expected = {'id':session_id, - 'path': u'/path/to/test.ipynb', - 'notebook': {'path': u'/path/to/test.ipynb', 'name': None}, + 'path': '/path/to/test.ipynb', + 'notebook': {'path': '/path/to/test.ipynb', 'name': None}, 'type': 'notebook', 'name': None, 'kernel': { @@ -103,18 +103,18 @@ def test_get_session_dead_kernel(self): def test_list_sessions(self): sm = self.sm sessions = self.create_sessions( - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.py', type='file', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), + {'path': '/path/to/1/test1.ipynb', 'kernel_name': 'python'}, + {'path': '/path/to/2/test2.py', 'type': 'file', 'kernel_name': 'python'}, + {'path': '/path/to/3', 'name': 'foo', 'type': 'console', 'kernel_name': 'python'}, ) - + sessions = self.loop.run_sync(lambda: sm.list_sessions()) expected = [ { 'id':sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', + 'path': '/path/to/1/test1.ipynb', 'type': 'notebook', - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/1/test1.ipynb', 'name': None}, 'name': None, 'kernel': { 'id': 'A', @@ -125,7 +125,7 @@ def test_list_sessions(self): } }, { 'id':sessions[1]['id'], - 'path': u'/path/to/2/test2.py', + 'path': '/path/to/2/test2.py', 'type': 'file', 'name': None, 'kernel': { @@ -137,7 +137,7 @@ def test_list_sessions(self): } }, { 'id':sessions[2]['id'], - 'path': u'/path/to/3', + 'path': '/path/to/3', 'type': 'console', 'name': 'foo', 'kernel': { @@ -163,10 +163,10 @@ def test_list_sessions_dead_kernel(self): expected = [ { 'id': sessions[1]['id'], - 'path': u'/path/to/2/test2.ipynb', + 'path': '/path/to/2/test2.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/2/test2.ipynb', 'name': None}, 'kernel': { 'id': 'B', 'name':'python', @@ -185,10 +185,10 @@ def test_update_session(self): self.loop.run_sync(lambda: sm.update_session(session_id, path='/path/to/new_name.ipynb')) model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) expected = {'id':session_id, - 'path': u'/path/to/new_name.ipynb', + 'path': '/path/to/new_name.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/new_name.ipynb', 'name': None}, 'kernel': { 'id': 'A', 'name':'julia', @@ -218,10 +218,10 @@ def test_delete_session(self): new_sessions = self.loop.run_sync(lambda: sm.list_sessions()) expected = [{ 'id': sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', + 'path': '/path/to/1/test1.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/1/test1.ipynb', 'name': None}, 'kernel': { 'id': 
'A', 'name':'python', @@ -232,7 +232,7 @@ def test_delete_session(self): }, { 'id': sessions[2]['id'], 'type': 'console', - 'path': u'/path/to/3', + 'path': '/path/to/3', 'name': 'foo', 'kernel': { 'id': 'C', diff --git a/notebook/services/sessions/tests/test_sessions_api.py b/notebook/services/sessions/tests/test_sessions_api.py index cb4bc0bdea..0aa784b020 100644 --- a/notebook/services/sessions/tests/test_sessions_api.py +++ b/notebook/services/sessions/tests/test_sessions_api.py @@ -2,11 +2,9 @@ import errno from functools import partial -import io import os import json import shutil -import sys import time from unittest import SkipTest @@ -25,7 +23,7 @@ pjoin = os.path.join -class SessionAPI(object): +class SessionAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request @@ -101,7 +99,7 @@ def setUp(self): raise self.addCleanup(partial(shutil.rmtree, subdir, ignore_errors=True)) - with io.open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f: + with open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f: nb = new_notebook() write(nb, f, version=4) @@ -130,7 +128,7 @@ def test_create(self): self.assertIn('id', newsession) self.assertEqual(newsession['path'], 'foo/nb1.ipynb') self.assertEqual(newsession['type'], 'notebook') - self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id'])) + self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{}'.format(newsession['id'])) sessions = self.sess_api.list().json() self.assertEqual(sessions, [newsession]) @@ -174,7 +172,7 @@ def test_create_with_kernel_id(self): self.assertIn('id', newsession) self.assertEqual(newsession['path'], 'foo/nb1.ipynb') self.assertEqual(newsession['kernel']['id'], kernel['id']) - self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id'])) + self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{}'.format(newsession['id'])) sessions = self.sess_api.list().json() self.assertEqual(sessions, [newsession]) @@ -273,11 +271,11 @@ class AsyncSessionAPITest(SessionAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncSessionAPITest tests skipped due to down-level jupyter_client!") - super(AsyncSessionAPITest, cls).setup_class() + super().setup_class() @classmethod def get_argv(cls): - argv = super(AsyncSessionAPITest, cls).get_argv() + argv = super().get_argv() # Before we extend the argv with the class, ensure that appropriate jupyter_client is available. # if not available, don't set kernel_manager_class, resulting in the repeat of sync-based tests. 
diff --git a/notebook/terminal/handlers.py b/notebook/terminal/handlers.py index 0e026d00ab..a7d7d686bf 100644 --- a/notebook/terminal/handlers.py +++ b/notebook/terminal/handlers.py @@ -35,7 +35,7 @@ class NewTerminalHandler(IPythonHandler): def get(self, term_name): if term_name == 'new': raise web.HTTPError(400, "Terminal name 'new' is reserved.") - new_path = self.request.path.replace("new/{}".format(term_name), term_name) + new_path = self.request.path.replace(f"new/{term_name}", term_name) if term_name in self.terminal_manager.terminals: self.set_header('Location', new_path) self.set_status(302) @@ -50,7 +50,7 @@ class TermSocket(WebSocketMixin, IPythonHandler, terminado.TermSocket): def origin_check(self): """Terminado adds redundant origin_check - + Tornado already calls check_origin, so don't do anything here. """ return True diff --git a/notebook/terminal/terminalmanager.py b/notebook/terminal/terminalmanager.py index ed901310ec..28227c09d5 100644 --- a/notebook/terminal/terminalmanager.py +++ b/notebook/terminal/terminalmanager.py @@ -49,7 +49,7 @@ def create(self): def create_with_name(self, name): """Create a new terminal.""" if name in self.terminals: - raise web.HTTPError(409, "A terminal with name '{}' already exists.".format(name)) + raise web.HTTPError(409, f"A terminal with name '{name}' already exists.") term = self.get_terminal(name) return self._finish_create(name, term) @@ -110,7 +110,7 @@ def get_terminal_model(self, name): def _check_terminal(self, name): """Check a that terminal 'name' exists and raise 404 if not.""" if name not in self.terminals: - raise web.HTTPError(404, u'Terminal not found: %s' % name) + raise web.HTTPError(404, f'Terminal not found: {name}') def _initialize_culler(self): """Start culler if 'cull_inactive_timeout' is greater than zero. 
diff --git a/notebook/terminal/tests/test_terminals_api.py b/notebook/terminal/tests/test_terminals_api.py index 9e0cba82da..2eb22174c9 100644 --- a/notebook/terminal/tests/test_terminals_api.py +++ b/notebook/terminal/tests/test_terminals_api.py @@ -9,7 +9,7 @@ from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error -class TerminalAPI(object): +class TerminalAPI: """Wrapper for terminal REST API requests""" def __init__(self, request, base_url, headers): self.request = request diff --git a/notebook/tests/conftest.py b/notebook/tests/conftest.py index b9aee32cdc..135e814673 100644 --- a/notebook/tests/conftest.py +++ b/notebook/tests/conftest.py @@ -1,9 +1,7 @@ - - def pytest_addoption(parser): parser.addoption('--integration_tests', action='store_true', dest="integration_tests", default=False, help="enable integration tests") def pytest_configure(config): if not config.option.integration_tests: - setattr(config.option, 'markexpr', 'not integration_tests') \ No newline at end of file + setattr(config.option, 'markexpr', 'not integration_tests') diff --git a/notebook/tests/selenium/conftest.py b/notebook/tests/selenium/conftest.py index 64cdfa23bd..f1e77cc8c3 100644 --- a/notebook/tests/selenium/conftest.py +++ b/notebook/tests/selenium/conftest.py @@ -39,8 +39,8 @@ def notebook_server(): info = {} with TemporaryDirectory() as td: nbdir = info['nbdir'] = pjoin(td, 'notebooks') - os.makedirs(pjoin(nbdir, u'sub ∂ir1', u'sub ∂ir 1a')) - os.makedirs(pjoin(nbdir, u'sub ∂ir2', u'sub ∂ir 1b')) + os.makedirs(pjoin(nbdir, 'sub ∂ir1', 'sub ∂ir 1a')) + os.makedirs(pjoin(nbdir, 'sub ∂ir2', 'sub ∂ir 1b')) info['extra_env'] = { 'JUPYTER_CONFIG_DIR': pjoin(td, 'jupyter_config'), diff --git a/notebook/tests/selenium/test_deletecell.py b/notebook/tests/selenium/test_deletecell.py index 8253b9e63a..0e60adfaa6 100644 --- a/notebook/tests/selenium/test_deletecell.py +++ b/notebook/tests/selenium/test_deletecell.py @@ -1,6 +1,5 @@ - def cell_is_deletable(nb, index): - JS = 'return Jupyter.notebook.get_cell({}).is_deletable();'.format(index) + JS = f'return Jupyter.notebook.get_cell({index}).is_deletable();' return nb.browser.execute_script(JS) def remove_all_cells(notebook): @@ -20,11 +19,11 @@ def test_delete_cells(prefill_notebook): notebook.set_cell_metadata(0, 'deletable', 'false') notebook.set_cell_metadata(1, 'deletable', 0 - ) + ) assert not cell_is_deletable(notebook, 0) assert cell_is_deletable(notebook, 1) assert cell_is_deletable(notebook, 2) - + # Try to delete cell a (should not be deleted) notebook.delete_cell(0) assert notebook.get_cells_contents() == [a, b, c] diff --git a/notebook/tests/selenium/test_multiselect_toggle.py b/notebook/tests/selenium/test_multiselect_toggle.py index 14f51d0259..372d83b275 100644 --- a/notebook/tests/selenium/test_multiselect_toggle.py +++ b/notebook/tests/selenium/test_multiselect_toggle.py @@ -1,4 +1,3 @@ - INITIAL_CELLS = ['print("a")', 'print("b")', 'print("c")'] def test_multiselect_toggle(prefill_notebook): notebook = prefill_notebook(INITIAL_CELLS) @@ -24,7 +23,7 @@ def select_cells(): cell_output_states = notebook.browser.execute_script( "return Jupyter.notebook.get_cells().map(c => c.collapsed)") assert cell_output_states == [False] * 3, "ensure that all cells are not collapsed" - + # Test that cells, which start off not scrolled are scrolled after # calling the multiselected scroll toggle. 
select_cells() diff --git a/notebook/tests/selenium/test_prompt_numbers.py b/notebook/tests/selenium/test_prompt_numbers.py index 42e27775ec..38872b8550 100755 --- a/notebook/tests/selenium/test_prompt_numbers.py +++ b/notebook/tests/selenium/test_prompt_numbers.py @@ -1,4 +1,3 @@ - def test_prompt_numbers(prefill_notebook): notebook = prefill_notebook(['print("a")']) diff --git a/notebook/tests/selenium/test_save_as_notebook.py b/notebook/tests/selenium/test_save_as_notebook.py index 567087d438..781410ce83 100644 --- a/notebook/tests/selenium/test_save_as_notebook.py +++ b/notebook/tests/selenium/test_save_as_notebook.py @@ -19,8 +19,8 @@ def get_notebook_name(nb): return nb.browser.execute_script(JS) def set_notebook_name(nb, name): - JS = 'Jupyter.notebook.rename("{}")'.format(name) - nb.browser.execute_script(JS) + JS = f'Jupyter.notebook.rename("{name}")' + nb.browser.execute_script(JS) def test_save_notebook_as(notebook): # Set a name for comparison later diff --git a/notebook/tests/selenium/utils.py b/notebook/tests/selenium/utils.py index 4407fce39d..cf192bd2ee 100644 --- a/notebook/tests/selenium/utils.py +++ b/notebook/tests/selenium/utils.py @@ -92,29 +92,29 @@ def multiple_found(driver): class CellTypeError(ValueError): - + def __init__(self, message=""): self.message = message class Notebook: - + def __init__(self, browser): self.browser = browser self._wait_for_start() self.disable_autosave_and_onbeforeunload() - + def __len__(self): return len(self.cells) - + def __getitem__(self, key): return self.cells[key] - + def __setitem__(self, key, item): if isinstance(key, int): self.edit_cell(index=key, content=item, render=False) # TODO: re-add slicing support, handle general python slicing behaviour - # includes: overwriting the entire self.cells object if you do + # includes: overwriting the entire self.cells object if you do # self[:] = [] # elif isinstance(key, slice): # indices = (self.index(cell) for cell in self[key]) @@ -138,20 +138,20 @@ def body(self): @property def cells(self): """Gets all cells once they are visible. - + """ return self.browser.find_elements_by_class_name("cell") - + @property def current_index(self): return self.index(self.current_cell) - + def index(self, cell): return self.cells.index(cell) def disable_autosave_and_onbeforeunload(self): """Disable request to save before closing window and autosave. - + This is most easily done by using js directly. """ self.browser.execute_script("window.onbeforeunload = null;") @@ -159,7 +159,7 @@ def disable_autosave_and_onbeforeunload(self): def to_command_mode(self): """Changes us into command mode on currently focused cell - + """ self.body.send_keys(Keys.ESCAPE) self.browser.execute_script("return Jupyter.notebook.handle_command_mode(" @@ -171,7 +171,7 @@ def focus_cell(self, index=0): cell.click() self.to_command_mode() self.current_cell = cell - + def select_cell_range(self, initial_index=0, final_index=0): self.focus_cell(initial_index) self.to_command_mode() @@ -201,7 +201,7 @@ def convert_cell_type(self, index=0, cell_type="code"): else: raise CellTypeError(("{} is not a valid cell type," "use 'code', 'markdown', or 'raw'").format(cell_type)) - + self.wait_for_stale_cell(cell) self.focus_cell(index) return self.current_cell @@ -209,7 +209,7 @@ def convert_cell_type(self, index=0, cell_type="code"): def wait_for_stale_cell(self, cell): """ This is needed to switch a cell's mode and refocus it, or to render it. 
- Warning: there is currently no way to do this when changing between + Warning: there is currently no way to do this when changing between markdown and raw cells. """ wait = WebDriverWait(self.browser, 10) @@ -234,15 +234,15 @@ def wait_for_cell_output(self, index=0, timeout=10): ) def set_cell_metadata(self, index, key, value): - JS = 'Jupyter.notebook.get_cell({}).metadata.{} = {}'.format(index, key, value) + JS = f'Jupyter.notebook.get_cell({index}).metadata.{key} = {value}' return self.browser.execute_script(JS) def get_cell_type(self, index=0): - JS = 'return Jupyter.notebook.get_cell({}).cell_type'.format(index) + JS = f'return Jupyter.notebook.get_cell({index}).cell_type' return self.browser.execute_script(JS) - + def set_cell_input_prompt(self, index, prmpt_val): - JS = 'Jupyter.notebook.get_cell({}).set_input_prompt({})'.format(index, prmpt_val) + JS = f'Jupyter.notebook.get_cell({index}).set_input_prompt({prmpt_val})' self.browser.execute_script(JS) def edit_cell(self, cell=None, index=0, content="", render=False): @@ -267,7 +267,7 @@ def edit_cell(self, cell=None, index=0, content="", render=False): def execute_cell(self, cell_or_index=None): if isinstance(cell_or_index, int): index = cell_or_index - elif isinstance(cell_or_index, WebElement): + elif isinstance(cell_or_index, WebElement): index = self.index(cell_or_index) else: raise TypeError("execute_cell only accepts a WebElement or an int") @@ -295,7 +295,7 @@ def delete_cell(self, index): def add_markdown_cell(self, index=-1, content="", render=True): self.add_cell(index, cell_type="markdown") self.edit_cell(index=index, content=content, render=render) - + def append(self, *values, cell_type="code"): for i, value in enumerate(values): if isinstance(value, str): @@ -303,10 +303,10 @@ def append(self, *values, cell_type="code"): content=value) else: raise TypeError("Don't know how to add cell from %r" % value) - + def extend(self, values): self.append(*values) - + def run_all(self): for cell in self: self.execute_cell(cell) @@ -320,7 +320,7 @@ def is_kernel_running(self): ) def clear_cell_output(self, index): - JS = 'Jupyter.notebook.clear_output({})'.format(index) + JS = f'Jupyter.notebook.clear_output({index})' self.browser.execute_script(JS) @classmethod @@ -336,24 +336,24 @@ def select_kernel(browser, kernel_name='kernel-python3'): wait = WebDriverWait(browser, 10) new_button = wait.until(EC.element_to_be_clickable((By.ID, "new-dropdown-button"))) new_button.click() - kernel_selector = '#{} a'.format(kernel_name) + kernel_selector = f'#{kernel_name} a' kernel = wait_for_selector(browser, kernel_selector, single=True) kernel.click() @contextmanager def new_window(browser): - """Contextmanager for switching to & waiting for a window created. - - This context manager gives you the ability to create a new window inside + """Contextmanager for switching to & waiting for a window created. + + This context manager gives you the ability to create a new window inside the created context and it will switch you to that new window. - + Usage example: - + from notebook.tests.selenium.utils import new_window, Notebook - + ⋮ # something that creates a browser object - + with new_window(browser): select_kernel(browser, kernel_name=kernel_name) nb = Notebook(browser) @@ -400,7 +400,7 @@ def trigger_keystrokes(browser, *keys): browser.send_keys(getattr(Keys, keys[0].upper(), keys[0])) def validate_dualmode_state(notebook, mode, index): - '''Validate the entire dual mode state of the notebook. 
+ '''Validate the entire dual mode state of the notebook. Checks if the specified cell is selected, and the mode and keyboard mode are the same. Depending on the mode given: Command: Checks that no cells are in focus or in edit mode. @@ -462,7 +462,7 @@ def is_focused_on(index): assert is_focused_on(None) #no focused cells assert is_only_cell_edit(None) #no cells in edit mode - + elif mode == 'edit': assert is_focused_on(index) #The specified cell is focused diff --git a/notebook/tests/test_files.py b/notebook/tests/test_files.py index b711945756..c7abbc3eba 100644 --- a/notebook/tests/test_files.py +++ b/notebook/tests/test_files.py @@ -1,12 +1,9 @@ """Test the /files/ handler.""" -import io import os -from unicodedata import normalize pjoin = os.path.join -import requests import json from nbformat import write @@ -16,21 +13,20 @@ from notebook.utils import url_path_join from .launchnotebook import NotebookTestBase -from ipython_genutils import py3compat class FilesTest(NotebookTestBase): def test_hidden_files(self): not_hidden = [ - u'å b', - u'å b/ç. d', + 'å b', + 'å b/ç. d', ] hidden = [ - u'.å b', - u'å b/.ç d', + '.å b', + 'å b/.ç d', ] dirs = not_hidden + hidden - + nbdir = self.notebook_dir for d in dirs: path = pjoin(nbdir, d.replace('/', os.sep)) @@ -82,23 +78,23 @@ def test_contents_manager(self): nb = new_notebook( cells=[ - new_markdown_cell(u'Created by test ³'), + new_markdown_cell('Created by test ³'), new_code_cell("print(2*6)", outputs=[ new_output("stream", text="12"), ]) ] ) - with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w', + with open(pjoin(nbdir, 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) - with io.open(pjoin(nbdir, 'test.bin'), 'wb') as f: + with open(pjoin(nbdir, 'test.bin'), 'wb') as f: f.write(b'\xff' + os.urandom(5)) f.close() - with io.open(pjoin(nbdir, 'test.txt'), 'w') as f: - f.write(u'foobar') + with open(pjoin(nbdir, 'test.txt'), 'w') as f: + f.write('foobar') f.close() r = self.request('GET', 'files/testnb.ipynb') @@ -116,14 +112,14 @@ def test_contents_manager(self): self.assertEqual(r.status_code, 200) self.assertEqual(r.headers['content-type'], 'text/plain; charset=UTF-8') self.assertEqual(r.text, 'foobar') - + def test_download(self): nbdir = self.notebook_dir - + text = 'hello' with open(pjoin(nbdir, 'test.txt'), 'w') as f: f.write(text) - + r = self.request('GET', 'files/test.txt') disposition = r.headers.get('Content-Disposition', '') self.assertNotIn('attachment', disposition) @@ -132,24 +128,24 @@ def test_download(self): disposition = r.headers.get('Content-Disposition', '') self.assertIn('attachment', disposition) self.assertIn("filename*=utf-8''test.txt", disposition) - + def test_view_html(self): nbdir = self.notebook_dir - + html = '
<div>Test test</div>
' with open(pjoin(nbdir, 'test.html'), 'w') as f: f.write(html) - + r = self.request('GET', 'view/test.html') self.assertEqual(r.status_code, 200) def test_old_files_redirect(self): """pre-2.0 'files/' prefixed links are properly redirected""" nbdir = self.notebook_dir - + os.mkdir(pjoin(nbdir, 'files')) os.makedirs(pjoin(nbdir, 'sub', 'files')) - + for prefix in ('', 'sub'): with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f: f.write(prefix + '/files/f1') diff --git a/notebook/tests/test_gateway.py b/notebook/tests/test_gateway.py index f84ba06369..de7615936f 100644 --- a/notebook/tests/test_gateway.py +++ b/notebook/tests/test_gateway.py @@ -136,23 +136,23 @@ class TestGateway(NotebookTestBase): @classmethod def setup_class(cls): GatewayClient.clear_instance() - super(TestGateway, cls).setup_class() + super().setup_class() @classmethod def teardown_class(cls): GatewayClient.clear_instance() - super(TestGateway, cls).teardown_class() + super().teardown_class() @classmethod def get_patch_env(cls): - test_env = super(TestGateway, cls).get_patch_env() + test_env = super().get_patch_env() test_env.update({'JUPYTER_GATEWAY_URL': TestGateway.mock_gateway_url, 'JUPYTER_GATEWAY_CONNECT_TIMEOUT': '44.4'}) return test_env @classmethod def get_argv(cls): - argv = super(TestGateway, cls).get_argv() + argv = super().get_argv() argv.extend(['--GatewayClient.request_timeout=96.0', '--GatewayClient.http_user=' + TestGateway.mock_http_user]) return argv diff --git a/notebook/tests/test_nbextensions.py b/notebook/tests/test_nbextensions.py index 3d9549658a..001d3d9bd2 100644 --- a/notebook/tests/test_nbextensions.py +++ b/notebook/tests/test_nbextensions.py @@ -69,9 +69,9 @@ def cleanup_tempdirs(): self.src = self.tempdir() self.files = files = [ - pjoin(u'ƒile'), - pjoin(u'∂ir', u'ƒile1'), - pjoin(u'∂ir', u'∂ir2', u'ƒile2'), + pjoin('ƒile'), + pjoin('∂ir', 'ƒile1'), + pjoin('∂ir', '∂ir2', 'ƒile2'), ] for file_name in files: fullpath = os.path.join(self.src, file_name) @@ -107,15 +107,15 @@ def cleanup_tempdirs(): def assert_dir_exists(self, path): if not os.path.exists(path): do_exist = os.listdir(os.path.dirname(path)) - self.fail(u"%s should exist (found %s)" % (path, do_exist)) + self.fail("%s should exist (found %s)" % (path, do_exist)) def assert_not_dir_exists(self, path): if os.path.exists(path): - self.fail(u"%s should not exist" % path) + self.fail("%s should not exist" % path) def assert_installed(self, relative_path, user=False): if user: - nbext = pjoin(self.data_dir, u'nbextensions') + nbext = pjoin(self.data_dir, 'nbextensions') else: nbext = self.system_nbext self.assert_dir_exists( @@ -124,7 +124,7 @@ def assert_installed(self, relative_path, user=False): def assert_not_installed(self, relative_path, user=False): if user: - nbext = pjoin(self.data_dir, u'nbextensions') + nbext = pjoin(self.data_dir, 'nbextensions') else: nbext = self.system_nbext self.assert_not_dir_exists( @@ -150,17 +150,17 @@ def test_create_nbextensions_user(self): with TemporaryDirectory() as td: install_nbextension(self.src, user=True) self.assert_installed( - pjoin(basename(self.src), u'ƒile'), + pjoin(basename(self.src), 'ƒile'), user=True ) def test_create_nbextensions_system(self): with TemporaryDirectory() as td: - self.system_nbext = pjoin(td, u'nbextensions') + self.system_nbext = pjoin(td, 'nbextensions') with patch.object(nbextensions, 'SYSTEM_JUPYTER_PATH', [td]): install_nbextension(self.src, user=False) self.assert_installed( - pjoin(basename(self.src), u'ƒile'), + pjoin(basename(self.src), 
'ƒile'), user=False ) @@ -170,28 +170,28 @@ def test_single_file(self): self.assert_installed(file_name) def test_single_dir(self): - d = u'∂ir' + d = '∂ir' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) def test_single_dir_trailing_slash(self): - d = u'∂ir/' + d = '∂ir/' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) if os.name == 'nt': - d = u'∂ir\\' + d = '∂ir\\' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) def test_destination_file(self): file_name = self.files[0] - install_nbextension(pjoin(self.src, file_name), destination = u'ƒiledest') - self.assert_installed(u'ƒiledest') + install_nbextension(pjoin(self.src, file_name), destination = 'ƒiledest') + self.assert_installed('ƒiledest') def test_destination_dir(self): - d = u'∂ir' - install_nbextension(pjoin(self.src, d), destination = u'ƒiledest2') - self.assert_installed(pjoin(u'ƒiledest2', u'∂ir2', u'ƒile2')) + d = '∂ir' + install_nbextension(pjoin(self.src, d), destination = 'ƒiledest2') + self.assert_installed(pjoin('ƒiledest2', '∂ir2', 'ƒile2')) def test_install_nbextension(self): with self.assertRaises(TypeError): @@ -199,7 +199,7 @@ def test_install_nbextension(self): def test_overwrite_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) with open(src, 'w') as f: f.write('first') @@ -216,12 +216,12 @@ def test_overwrite_file(self): def test_overwrite_dir(self): with TemporaryDirectory() as src: base = basename(src) - fname = u'ƒ.js' + fname = 'ƒ.js' touch(pjoin(src, fname)) install_nbextension(src) self.assert_installed(pjoin(base, fname)) os.remove(pjoin(src, fname)) - fname2 = u'∂.js' + fname2 = '∂.js' touch(pjoin(src, fname2)) install_nbextension(src, overwrite=True) self.assert_installed(pjoin(base, fname2)) @@ -229,7 +229,7 @@ def test_overwrite_dir(self): def test_update_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) with open(src, 'w') as f: f.write('first') @@ -247,7 +247,7 @@ def test_update_file(self): def test_skip_old_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) mtime = touch(src) install_nbextension(src) @@ -311,7 +311,7 @@ def fake_urlretrieve(url, dest): def test_check_nbextension(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, user=True) @@ -323,7 +323,7 @@ def test_check_nbextension(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_install_symlink(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, symlink=True) @@ -335,8 +335,8 @@ def test_install_symlink(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_overwrite_broken_symlink(self): with TemporaryDirectory() as d: - f = u'ƒ.js' - f2 = u'ƒ2.js' + f = 'ƒ.js' + f2 = 'ƒ2.js' src = pjoin(d, f) src2 = pjoin(d, f2) touch(src) @@ -351,8 +351,8 @@ def test_overwrite_broken_symlink(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_install_symlink_destination(self): with TemporaryDirectory() as d: - f = u'ƒ.js' - flink = u'ƒlink.js' + f = 'ƒ.js' + flink = 'ƒlink.js' src = pjoin(d, f) touch(src) install_nbextension(src, symlink=True, destination=flink) @@ -367,7 +367,7 @@ def test_install_symlink_bad(self): install_nbextension("http://example.com/foo.js", 
symlink=True) with TemporaryDirectory() as d: - zf = u'ƒ.zip' + zf = 'ƒ.zip' zsrc = pjoin(d, zf) with zipfile.ZipFile(zsrc, 'w') as z: z.writestr("a.js", b"b();") @@ -377,7 +377,7 @@ def test_install_symlink_bad(self): def test_install_destination_bad(self): with TemporaryDirectory() as d: - zf = u'ƒ.zip' + zf = 'ƒ.zip' zsrc = pjoin(d, zf) with zipfile.ZipFile(zsrc, 'w') as z: z.writestr("a.js", b"b();") @@ -387,24 +387,24 @@ def test_install_destination_bad(self): def test_nbextension_enable(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, user=True) - enable_nbextension(section='notebook', require=u'ƒ') + enable_nbextension(section='notebook', require='ƒ') config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - enabled = cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) + enabled = cm.get('notebook').get('load_extensions', {}).get('ƒ', False) assert enabled def test_nbextension_disable(self): self.test_nbextension_enable() - disable_nbextension(section='notebook', require=u'ƒ') + disable_nbextension(section='notebook', require='ƒ') config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - enabled = cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) + enabled = cm.get('notebook').get('load_extensions', {}).get('ƒ', False) assert not enabled diff --git a/notebook/tests/test_notebookapp.py b/notebook/tests/test_notebookapp.py index d48501b935..88956b9b9d 100644 --- a/notebook/tests/test_notebookapp.py +++ b/notebook/tests/test_notebookapp.py @@ -211,7 +211,7 @@ class NotebookAppJSONLoggingTests(NotebookTestBase): """Tests for when json logging is enabled.""" @classmethod def setup_class(cls): - super(NotebookAppJSONLoggingTests, cls).setup_class() + super().setup_class() try: import json_logging cls.json_logging_available = True @@ -220,7 +220,7 @@ def setup_class(cls): @classmethod def get_patch_env(cls): - test_env = super(NotebookAppJSONLoggingTests, cls).get_patch_env() + test_env = super().get_patch_env() test_env.update({'JUPYTER_ENABLE_JSON_LOGGING': 'true'}) return test_env diff --git a/notebook/tests/test_paths.py b/notebook/tests/test_paths.py index 33f44afe4d..90830559b7 100644 --- a/notebook/tests/test_paths.py +++ b/notebook/tests/test_paths.py @@ -1,4 +1,3 @@ - import re from notebook.base.handlers import path_regex diff --git a/notebook/tests/test_serverextensions.py b/notebook/tests/test_serverextensions.py index 898647926b..256c557b79 100644 --- a/notebook/tests/test_serverextensions.py +++ b/notebook/tests/test_serverextensions.py @@ -31,7 +31,7 @@ def test_help_output(): outer_file = __file__ -class MockExtensionModule(object): +class MockExtensionModule: __file__ = outer_file @staticmethod @@ -41,13 +41,13 @@ def _jupyter_server_extension_paths(): }] loaded = False - + def load_jupyter_server_extension(self, app): self.loaded = True class MockEnvTestCase(TestCase): - + def tempdir(self): td = TemporaryDirectory() self.tempdirs.append(td) @@ -64,7 +64,7 @@ def setUp(self): self.system_config_dir = os.path.join(self.test_dir, 'system_config') self.system_path = [self.system_data_dir] self.system_config_path = [self.system_config_dir] - + self.patches = [] p = patch.dict('os.environ', { 'JUPYTER_CONFIG_DIR': self.config_dir, @@ -97,7 +97,7 @@ def setUp(self): self.assertEqual(paths.jupyter_config_path(), [self.config_dir] + self.system_config_path) 
self.assertEqual(extensions._get_config_dir(user=False), self.system_config_dir) self.assertEqual(paths.jupyter_path(), [self.data_dir] + self.system_path) - + def tearDown(self): for modulename in self._mock_extensions: sys.modules.pop(modulename) diff --git a/notebook/tests/test_utils.py b/notebook/tests/test_utils.py index 51f0e8accc..b6c386a2bc 100644 --- a/notebook/tests/test_utils.py +++ b/notebook/tests/test_utils.py @@ -84,7 +84,7 @@ def test_is_hidden(): def test_is_hidden_win32(): with TemporaryDirectory() as root: root = cast_unicode(root) - subdir1 = os.path.join(root, u'subdir') + subdir1 = os.path.join(root, 'subdir') os.makedirs(subdir1) assert not is_hidden(subdir1, root) r = ctypes.windll.kernel32.SetFileAttributesW(subdir1, 0x02) diff --git a/notebook/traittypes.py b/notebook/traittypes.py index 226657c1f4..3721cbdea1 100644 --- a/notebook/traittypes.py +++ b/notebook/traittypes.py @@ -75,7 +75,7 @@ class name where an object was defined. if article == "the" or (article is None and not inspect.isclass(value)): if name is not None: - result = "{} {}".format(typename, name) + result = f"{typename} {name}" if article is not None: return add_article(result, True, capital) else: @@ -294,7 +294,7 @@ class or its subclasses. Our implementation is quite different self.default_args = args self.default_kwargs = kw - super(InstanceFromClasses, self).__init__(**kwargs) + super().__init__(**kwargs) def instance_from_importable_klasses(self, value): "Check that a given class is a subclasses found in the klasses list." diff --git a/notebook/tree/tests/test_tree_handler.py b/notebook/tree/tests/test_tree_handler.py index 801185ae51..b602b9402b 100644 --- a/notebook/tree/tests/test_tree_handler.py +++ b/notebook/tree/tests/test_tree_handler.py @@ -1,13 +1,10 @@ """Test the /tree handlers""" import os -import io from notebook.utils import url_path_join from nbformat import write from nbformat.v4 import new_notebook from urllib.parse import urlparse -import requests - from notebook.tests.launchnotebook import NotebookTestBase class TreeTest(NotebookTestBase): @@ -16,12 +13,12 @@ def setUp(self): d = os.path.join(nbdir, 'foo') os.mkdir(d) - with io.open(os.path.join(d, 'bar.ipynb'), 'w', encoding='utf-8') as f: + with open(os.path.join(d, 'bar.ipynb'), 'w', encoding='utf-8') as f: nb = new_notebook() write(nb, f, version=4) - with io.open(os.path.join(d, 'baz.txt'), 'w', encoding='utf-8') as f: - f.write(u'flamingo') + with open(os.path.join(d, 'baz.txt'), 'w', encoding='utf-8') as f: + f.write('flamingo') self.base_url() diff --git a/notebook/utils.py b/notebook/utils.py index ef6d9e437c..585f727266 100644 --- a/notebook/utils.py +++ b/notebook/utils.py @@ -79,14 +79,14 @@ def url_escape(path): Turns '/foo bar/' into '/foo%20bar/' """ parts = py3compat.unicode_to_str(path, encoding='utf8').split('/') - return u'/'.join([quote(p) for p in parts]) + return '/'.join([quote(p) for p in parts]) def url_unescape(path): """Unescape special characters in a URL path Turns '/foo%20bar/' into '/foo bar/' """ - return u'/'.join([ + return '/'.join([ py3compat.str_to_unicode(unquote(p), encoding='utf8') for p in py3compat.unicode_to_str(path, encoding='utf8').split('/') ]) @@ -393,7 +393,7 @@ def unix_socket_in_use(socket_path): try: sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.connect(socket_path) - except socket.error: + except OSError: return False else: return True diff --git a/notebook/view/handlers.py b/notebook/view/handlers.py index e788964662..bff604f558 100644 --- 
a/notebook/view/handlers.py +++ b/notebook/view/handlers.py @@ -13,7 +13,7 @@ class ViewHandler(IPythonHandler): def get(self, path): path = path.strip('/') if not self.contents_manager.file_exists(path): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, f'File does not exist: {path}') basename = path.rsplit('/', 1)[-1] file_url = url_path_join(self.base_url, 'files', url_escape(path)) diff --git a/setup.py b/setup.py index c54f9a1594..2ab8319d73 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ pip_message = 'This may be due to an out of date pip. Make sure you have pip >= 9.0.1.' try: import pip - pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]]) + pip_version = tuple(int(x) for x in pip.__version__.split('.')[:3]) if pip_version < (9, 0, 1) : pip_message = 'Your pip version is out of date, please install pip >= 9.0.1. '\ 'pip {} detected.'.format(pip.__version__) @@ -102,10 +102,10 @@ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9' + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], zip_safe = False, install_requires = [ @@ -135,7 +135,7 @@ 'test:sys_platform != "win32"': ['requests-unixsocket'], 'json-logging': ['json-logging'] }, - python_requires = '>=3.6', + python_requires = '>=3.7', entry_points = { 'console_scripts': [ 'jupyter-notebook = notebook.notebookapp:main', diff --git a/setupbase.py b/setupbase.py index 8849a85e3b..fd69363b9f 100644 --- a/setupbase.py +++ b/setupbase.py @@ -99,7 +99,7 @@ def find_package_data(): """ # This is not enough for these things to appear in a sdist. 
# We need to muck with the MANIFEST to get this to work - + # exclude components and less from the walk; # we will build the components separately excludes = [ @@ -119,12 +119,12 @@ def find_package_data(): continue for f in files: static_data.append(pjoin(parent, f)) - + # for verification purposes, explicitly add main.min.js # so that installation will fail if they are missing for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']: static_data.append(pjoin('static', app, 'js', 'main.min.js')) - + components = pjoin("static", "components") # select the components we actually need to install # (there are lots of resources we bundle for sdist-reasons that we don't actually use) @@ -173,10 +173,10 @@ def find_package_data(): mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'), mj('config', 'Safe.js'), ]) - + trees = [] mj_out = mj('jax', 'output') - + if os.path.exists(mj_out): for output in os.listdir(mj_out): path = pjoin(mj_out, output) @@ -210,7 +210,7 @@ def find_package_data(): 'notebook.services.api': ['api.yaml'], 'notebook.i18n': ['*/LC_MESSAGES/*.*'], } - + return package_data @@ -229,7 +229,7 @@ def check_package_data(package_data): def check_package_data_first(command): """decorator for checking package_data before running a given command - + Probably only needs to wrap build_py """ class DecoratedCommand(command): @@ -361,21 +361,21 @@ def run(self): class Bower(Command): description = "fetch static client-side components with bower" - + user_options = [ ('force', 'f', "force fetching of bower dependencies"), ] - + def initialize_options(self): self.force = False - + def finalize_options(self): self.force = bool(self.force) - + bower_dir = pjoin(static, 'components') node_modules = pjoin(repo_root, 'node_modules') sanitizer_dir = pjoin(bower_dir, 'sanitizer') - + def should_run(self): if self.force: return True @@ -402,15 +402,15 @@ def run(self): if not self.should_run(): print("bower dependencies up to date") return - + if self.should_run_npm(): print("installing build dependencies with npm") run(['npm', 'install'], cwd=repo_root) os.utime(self.node_modules, None) - + env = os.environ.copy() env['PATH'] = npm_path - + try: run( ['bower', 'install', '--allow-root', '--config.interactive=false'], @@ -453,9 +453,9 @@ def patch_out_bootstrap_bw_print(): class CompileCSS(Command): """Recompile Notebook CSS - + Regenerate the compiled CSS from LESS sources. - + Requires various dev dependencies, such as require and lessc. """ description = "Recompile Notebook CSS" @@ -479,7 +479,7 @@ def run(self): env['PATH'] = npm_path patch_out_bootstrap_bw_print() - + for src, dst in zip(self.sources, self.targets): try: run(['lessc', @@ -498,7 +498,7 @@ def run(self): class CompileJS(Command): """Rebuild Notebook Javascript main.min.js files and translation files. 
- + Calls require via build-main.js """ description = "Rebuild Notebook Javascript main.min.js files" @@ -514,7 +514,7 @@ def finalize_options(self): apps = ['notebook', 'tree', 'edit', 'terminal', 'auth'] targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ] - + def sources(self, name): """Generator yielding .js sources that an application depends on""" yield pjoin(repo_root, 'tools', 'build-main.js') @@ -535,7 +535,7 @@ def sources(self, name): continue for f in files: yield pjoin(parent, f) - + def should_run(self, name, target): if self.force or not os.path.exists(target): return True @@ -582,13 +582,13 @@ class JavascriptVersion(Command): """write the javascript version to notebook javascript""" description = "Write Jupyter version to javascript" user_options = [] - + def initialize_options(self): pass - + def finalize_options(self): pass - + def run(self): nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js") with open(nsfile) as f: @@ -597,7 +597,7 @@ def run(self): found = False for line in lines: if line.strip().startswith("Jupyter.version"): - line = ' Jupyter.version = "{0}";\n'.format(version) + line = f' Jupyter.version = "{version}";\n' found = True f.write(line) if not found: diff --git a/tools/secure_notebook.py b/tools/secure_notebook.py index 1984d33946..ec74399556 100644 --- a/tools/secure_notebook.py +++ b/tools/secure_notebook.py @@ -3,12 +3,10 @@ script to automatically setup notebook over SSL. Generate cert and keyfiles (rsa 1024) in ~/.ssh/, ask for a password, and add -the corresponding entries in the notebook json configuration file. +the corresponding entries in the notebook json configuration file. """ -import six - from notebook.auth import passwd from traitlets.config.loader import JSONFileConfigLoader, ConfigFileNotFound from jupyter_core.paths import jupyter_config_dir @@ -19,7 +17,6 @@ from OpenSSL import crypto from os.path import exists, join -import io import os import json import traceback @@ -33,7 +30,7 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): """ if exists(join(cert_dir, certfile)) or exists(join(cert_dir, keyfile)): - raise FileExistsError('{} or {} already exist in {}. Aborting.'.format(keyfile, certfile, cert_dir)) + raise FileExistsError(f'{keyfile} or {certfile} already exist in {cert_dir}. Aborting.') else: # create a key pair k = crypto.PKey() @@ -54,11 +51,11 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): cert.set_pubkey(k) cert.sign(k, 'sha256') - with io.open(join(cert_dir, certfile), "wt") as f: + with open(join(cert_dir, certfile), "wt") as f: f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf8')) os.chmod(join(cert_dir, certfile), 0o600) - - with io.open(join(cert_dir, keyfile), "wt") as f: + + with open(join(cert_dir, keyfile), "wt") as f: f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf8')) os.chmod(join(cert_dir, keyfile), 0o600) @@ -68,7 +65,7 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): def persist_config(mode=0o600): """Context manager that can be use to modify a config object - On exit of the context manager, the config will be written back to disk, + On exit of the context manager, the config will be written back to disk, by default with 600 permissions. 
""" @@ -81,8 +78,8 @@ def persist_config(mode=0o600): yield config filepath = os.path.join(jupyter_config_dir(), 'jupyter_notebook_config.json') - with io.open(filepath, 'w') as f: - f.write(six.u(json.dumps(config, indent=2))) + with open(filepath, 'w') as f: + f.write(json.dumps(config, indent=2)) try: os.chmod(filepath, mode) except Exception: