diff --git a/docs/development/settings.rst b/docs/development/settings.rst
index 9150f1ecf5d..1bff1e19d63 100644
--- a/docs/development/settings.rst
+++ b/docs/development/settings.rst
@@ -97,11 +97,11 @@ Whether to include `django.contrib.admin` in the URL's.
 RTD_BUILD_MEDIA_STORAGE
 -----------------------

-Default: ``None``
+Default: ``readthedocs.builds.storage.BuildMediaFileSystemStorage``

 Use this storage class to upload build artifacts to cloud storage (S3, Azure storage).
 This should be a dotted path to the relevant class (eg. ``'path.to.MyBuildMediaStorage'``).
-This class should mixin :class:`readthedocs.builds.storage.BuildMediaStorageMixin`.
+Your class should mix in :class:`readthedocs.builds.storage.BuildMediaStorageMixin`.


 ELASTICSEARCH_DSL
diff --git a/readthedocs/builds/storage.py b/readthedocs/builds/storage.py
index 389e279a1de..f65a5ea5cd9 100644
--- a/readthedocs/builds/storage.py
+++ b/readthedocs/builds/storage.py
@@ -64,10 +64,10 @@ def delete_directory(self, path):
         for folder_name in folders:
             if folder_name:
                 # Recursively delete the subdirectory
-                self.delete_directory(safe_join(path, folder_name))
+                self.delete_directory(self.join(path, folder_name))
         for filename in files:
             if filename:
-                self.delete(safe_join(path, filename))
+                self.delete(self.join(path, filename))

     def copy_directory(self, source, destination):
         """
@@ -79,7 +79,7 @@ def copy_directory(self, source, destination):
         log.debug('Copying source directory %s to media storage at %s', source, destination)
         source = Path(source)
         for filepath in source.iterdir():
-            sub_destination = safe_join(destination, filepath.name)
+            sub_destination = self.join(destination, filepath.name)
             if filepath.is_dir():
                 # Recursively copy the subdirectory
                 self.copy_directory(filepath, sub_destination)
@@ -87,6 +87,23 @@ def copy_directory(self, source, destination):
             else:
                 with filepath.open('rb') as fd:
                     self.save(sub_destination, fd)

+    def join(self, directory, filepath):
+        return safe_join(directory, filepath)
+
+    def walk(self, top):
+        if top in ('', '/'):
+            raise SuspiciousFileOperation('Iterating all storage cannot be right')
+
+        log.debug('Walking %s in media storage', top)
+        folders, files = self.listdir(self._dirpath(top))
+
+        yield top, folders, files
+
+        for folder_name in folders:
+            if folder_name:
+                # Recursively walk the subdirectory
+                yield from self.walk(self.join(top, folder_name))
+

 class BuildMediaFileSystemStorage(BuildMediaStorageMixin, FileSystemStorage):
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py
index cef45a494ea..11bca8555f4 100644
--- a/readthedocs/projects/models.py
+++ b/readthedocs/projects/models.py
@@ -1271,26 +1271,36 @@ def get_processed_json(self):
         Both lead to `foo/index.html`
         https://github.com/rtfd/readthedocs.org/issues/5368
         """
-        fjson_paths = []
-        basename = os.path.splitext(self.path)[0]
-        fjson_paths.append(basename + '.fjson')
-        if basename.endswith('/index'):
-            new_basename = re.sub(r'\/index$', '', basename)
-            fjson_paths.append(new_basename + '.fjson')
-
-        full_json_path = self.project.get_production_media_path(
-            type_='json', version_slug=self.version.slug, include_file=False
-        )
-        try:
-            for fjson_path in fjson_paths:
-                file_path = os.path.join(full_json_path, fjson_path)
-                if os.path.exists(file_path):
-                    return process_file(file_path)
-        except Exception:
+        file_path = None
+
+        if settings.RTD_BUILD_MEDIA_STORAGE:
+            storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+
+            fjson_paths = []
+            basename = os.path.splitext(self.path)[0]
+            fjson_paths.append(basename + '.fjson')
+            if basename.endswith('/index'):
+                new_basename = re.sub(r'\/index$', '', basename)
+                fjson_paths.append(new_basename + '.fjson')
+
+            storage_path = self.project.get_storage_path(
+                type_='json', version_slug=self.version.slug, include_file=False
+            )
+            try:
+                for fjson_path in fjson_paths:
+                    file_path = storage.join(storage_path, fjson_path)
+                    if storage.exists(file_path):
+                        return process_file(file_path)
+            except Exception:
+                log.warning(
+                    'Unhandled exception during search processing file: %s',
+                    file_path,
+                )
+        else:
             log.warning(
-                'Unhandled exception during search processing file: %s',
-                file_path,
+                'Skipping HTMLFile processing because of no storage backend'
             )
+
         return {
             'path': file_path,
             'title': '',
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py
index 3097799ec7c..a42c42da195 100644
--- a/readthedocs/projects/tasks.py
+++ b/readthedocs/projects/tasks.py
@@ -777,105 +777,117 @@ def store_build_artifacts(
         :param pdf: whether to save PDF output
         :param epub: whether to save ePub output
         """
-        if settings.RTD_BUILD_MEDIA_STORAGE:
-            log.info(
+        if not settings.RTD_BUILD_MEDIA_STORAGE:
+            log.warning(
                 LOG_TEMPLATE,
                 {
                     'project': self.version.project.slug,
                     'version': self.version.slug,
-                    'msg': 'Writing build artifacts to media storage',
+                    'msg': (
+                        'RTD_BUILD_MEDIA_STORAGE is missing - '
+                        'Not writing build artifacts to media storage'
+                    ),
                 },
             )
+            return

-            storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+        storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+        log.info(
+            LOG_TEMPLATE,
+            {
+                'project': self.version.project.slug,
+                'version': self.version.slug,
+                'msg': 'Writing build artifacts to media storage',
+            },
+        )

-            types_to_copy = []
-            types_to_delete = []
+        types_to_copy = []
+        types_to_delete = []

-            # HTML media
-            if html:
-                types_to_copy.append(('html', self.config.doctype))
+        # HTML media
+        if html:
+            types_to_copy.append(('html', self.config.doctype))

-            # Search media (JSON)
-            if search:
-                types_to_copy.append(('json', 'sphinx_search'))
+        # Search media (JSON)
+        if search:
+            types_to_copy.append(('json', 'sphinx_search'))

-            if localmedia:
-                types_to_copy.append(('htmlzip', 'sphinx_localmedia'))
-            else:
-                types_to_delete.append('htmlzip')
+        if localmedia:
+            types_to_copy.append(('htmlzip', 'sphinx_localmedia'))
+        else:
+            types_to_delete.append('htmlzip')

-            if pdf:
-                types_to_copy.append(('pdf', 'sphinx_pdf'))
-            else:
-                types_to_delete.append('pdf')
+        if pdf:
+            types_to_copy.append(('pdf', 'sphinx_pdf'))
+        else:
+            types_to_delete.append('pdf')

-            if epub:
-                types_to_copy.append(('epub', 'sphinx_epub'))
-            else:
-                types_to_delete.append('epub')
+        if epub:
+            types_to_copy.append(('epub', 'sphinx_epub'))
+        else:
+            types_to_delete.append('epub')

-            for media_type, build_type in types_to_copy:
-                from_path = self.version.project.artifact_path(
-                    version=self.version.slug,
-                    type_=build_type,
-                )
-                to_path = self.version.project.get_storage_path(
-                    type_=media_type,
-                    version_slug=self.version.slug,
-                    include_file=False,
-                    version_type=self.version.type,
-                )
-                log.info(
+        for media_type, build_type in types_to_copy:
+            from_path = self.version.project.artifact_path(
+                version=self.version.slug,
+                type_=build_type,
+            )
+            to_path = self.version.project.get_storage_path(
+                type_=media_type,
+                version_slug=self.version.slug,
+                include_file=False,
+                version_type=self.version.type,
+            )
+            log.info(
+                LOG_TEMPLATE,
+                {
+                    'project': self.version.project.slug,
+                    'version': self.version.slug,
+                    'msg': f'Writing {media_type} to media storage - {to_path}',
+                },
+            )
+            try:
+                storage.copy_directory(from_path, to_path)
+            except Exception:
+                # Ideally this should just be an IOError
+                # but some storage backends unfortunately throw other errors
+                log.exception(
                     LOG_TEMPLATE,
                     {
                         'project': self.version.project.slug,
                         'version': self.version.slug,
-                        'msg': f'Writing {media_type} to media storage - {to_path}',
+                        'msg': f'Error copying {from_path} to storage (not failing build)',
                     },
                 )
-                try:
-                    storage.copy_directory(from_path, to_path)
-                except Exception:
-                    # Ideally this should just be an IOError
-                    # but some storage backends unfortunately throw other errors
-                    log.exception(
-                        LOG_TEMPLATE,
-                        {
-                            'project': self.version.project.slug,
-                            'version': self.version.slug,
-                            'msg': f'Error copying {from_path} to storage (not failing build)',
-                        },
-                    )

-            for media_type in types_to_delete:
-                media_path = self.version.project.get_storage_path(
-                    type_=media_type,
-                    version_slug=self.version.slug,
-                    include_file=False,
-                    version_type=self.version.type,
-                )
-                log.info(
+        for media_type in types_to_delete:
+            media_path = self.version.project.get_storage_path(
+                type_=media_type,
+                version_slug=self.version.slug,
+                include_file=False,
+                version_type=self.version.type,
+            )
+            log.info(
+                LOG_TEMPLATE,
+                {
+                    'project': self.version.project.slug,
+                    'version': self.version.slug,
+                    'msg': f'Deleting {media_type} from media storage - {media_path}',
+                },
+            )
+            try:
+                storage.delete_directory(media_path)
+            except Exception:
+                # Ideally this should just be an IOError
+                # but some storage backends unfortunately throw other errors
+                log.exception(
                     LOG_TEMPLATE,
                     {
                         'project': self.version.project.slug,
                         'version': self.version.slug,
-                        'msg': f'Deleting {media_type} from media storage - {media_path}',
+                        'msg': f'Error deleting {media_path} from storage (not failing build)',
                     },
                 )
-                try:
-                    storage.delete_directory(media_path)
-                except Exception:
-                    # Ideally this should just be an IOError
-                    # but some storage backends unfortunately throw other errors
-                    log.exception(
-                        LOG_TEMPLATE,
-                        {
-                            'project': self.version.project.slug,
-                            'version': self.version.slug,
-                            'msg': f'Error deleting {media_path} from storage (not failing build)',
-                        },
-                    )

     def update_app_instances(
         self,
@@ -1337,7 +1349,6 @@ def fileify(version_pk, commit, build):
         )
         return

-    path = project.rtd_build_path(version.slug)
     log.info(
         LOG_TEMPLATE,
         {
@@ -1347,13 +1358,13 @@ def fileify(version_pk, commit, build):
         }
     )
     try:
-        changed_files = _create_imported_files(version, path, commit, build)
+        changed_files = _create_imported_files(version, commit, build)
     except Exception:
        changed_files = set()
        log.exception('Failed during ImportedFile creation')

     try:
-        _create_intersphinx_data(version, path, commit, build)
+        _create_intersphinx_data(version, commit, build)
     except Exception:
         log.exception('Failed during SphinxDomain creation')

@@ -1363,30 +1374,38 @@ def fileify(version_pk, commit, build):
         log.exception('Failed during ImportedFile syncing')


-def _create_intersphinx_data(version, path, commit, build):
+def _create_intersphinx_data(version, commit, build):
     """
     Create intersphinx data for this version.

     :param version: Version instance
-    :param path: Path to search
     :param commit: Commit that updated path
     :param build: Build id
     """
-
-    object_file = os.path.join(path, 'objects.inv')
-    if not os.path.exists(object_file):
-        log.debug('No objects.inv, skipping intersphinx indexing.')
+    if not settings.RTD_BUILD_MEDIA_STORAGE:
+        log.warning('RTD_BUILD_MEDIA_STORAGE is missing - Not updating intersphinx data')
         return

-    full_json_path = version.project.get_production_media_path(
+    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+
+    html_storage_path = version.project.get_storage_path(
+        type_='html', version_slug=version.slug, include_file=False
+    )
+    json_storage_path = version.project.get_storage_path(
         type_='json', version_slug=version.slug, include_file=False
     )
-    type_file = os.path.join(full_json_path, 'readthedocs-sphinx-domain-names.json')
+
+    object_file = storage.join(html_storage_path, 'objects.inv')
+    if not storage.exists(object_file):
+        log.debug('No objects.inv, skipping intersphinx indexing.')
+        return
+
+    type_file = storage.join(json_storage_path, 'readthedocs-sphinx-domain-names.json')
     types = {}
     titles = {}
-    if os.path.exists(type_file):
+    if storage.exists(type_file):
         try:
-            data = json.load(open(type_file))
+            data = json.load(storage.open(type_file))
             types = data['types']
             titles = data['titles']
         except Exception:
@@ -1406,7 +1425,13 @@ def warn(self, msg):
         log.warning('Sphinx MockApp: %s', msg)

     # Re-create all objects from the new build of the version
-    invdata = intersphinx.fetch_inventory(MockApp(), '', object_file)
+    object_file_url = storage.url(object_file)
+    if object_file_url.startswith('/'):
+        # Filesystem backed storage simply prepends MEDIA_URL to the path to get the URL
+        # This can cause an issue if MEDIA_URL is not fully qualified
+        object_file_url = 'http://' + settings.PRODUCTION_DOMAIN + object_file_url
+
+    invdata = intersphinx.fetch_inventory(MockApp(), '', object_file_url)
     for key, value in sorted(invdata.items() or {}):
         domain, _type = key.split(':')
         for name, einfo in sorted(value.items()):
@@ -1505,31 +1530,47 @@ def clean_build(version_pk):
     return True


-def _create_imported_files(version, path, commit, build):
+def _create_imported_files(version, commit, build):
     """
     Create imported files for version.

     :param version: Version instance
-    :param path: Path to search
     :param commit: Commit that updated path
     :param build: Build id
     :returns: paths of changed files
     :rtype: set
     """
+    if not settings.RTD_BUILD_MEDIA_STORAGE:
+        log.warning('RTD_BUILD_MEDIA_STORAGE is missing - Not updating imported files')
+        return set()
+
+    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+
     changed_files = set()
+
     # Re-create all objects from the new build of the version
-    for root, __, filenames in os.walk(path):
+    storage_path = version.project.get_storage_path(
+        type_='html', version_slug=version.slug, include_file=False
+    )
+    for root, __, filenames in storage.walk(storage_path):
         for filename in filenames:
             if filename.endswith('.html'):
                 model_class = HTMLFile
-            else:
+            elif version.project.cdn_enabled:
+                # We need to track all files for CDN enabled projects so the files can be purged
                 model_class = ImportedFile
+            else:
+                # For projects not behind a CDN, we don't care about non-HTML
+                continue
+
+            full_path = storage.join(root, filename)
+
+            # Generate a relative path for storage similar to os.path.relpath
+            relpath = full_path.replace(storage_path, '', 1).lstrip('/')

-            full_path = os.path.join(root, filename)
-            relpath = os.path.relpath(full_path, path)
             try:
-                md5 = hashlib.md5(open(full_path, 'rb').read()).hexdigest()
+                md5 = hashlib.md5(storage.open(full_path, 'rb').read()).hexdigest()
             except Exception:
                 log.exception(
                     'Error while generating md5 for %s:%s:%s. Don\'t stop.',
@@ -1799,11 +1840,14 @@ def remove_build_storage_paths(paths):

     :param paths: list of paths in build media storage to delete
     """
-    if settings.RTD_BUILD_MEDIA_STORAGE:
-        storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
-        for storage_path in paths:
-            log.info('Removing %s from media storage', storage_path)
-            storage.delete_directory(storage_path)
+    if not settings.RTD_BUILD_MEDIA_STORAGE:
+        log.warning('RTD_BUILD_MEDIA_STORAGE is missing - Not removing paths from media storage')
+        return
+
+    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+    for storage_path in paths:
+        log.info('Removing %s from media storage', storage_path)
+        storage.delete_directory(storage_path)


 @app.task(queue='web')
diff --git a/readthedocs/rtd_tests/tests/test_build_storage.py b/readthedocs/rtd_tests/tests/test_build_storage.py
new file mode 100644
index 00000000000..0b8443e3a09
--- /dev/null
+++ b/readthedocs/rtd_tests/tests/test_build_storage.py
@@ -0,0 +1,61 @@
+import os
+import shutil
+import tempfile
+
+from django.test import TestCase
+
+from readthedocs.builds.storage import BuildMediaFileSystemStorage
+
+
+files_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'files')
+
+
+class TestBuildMediaStorage(TestCase):
+    def setUp(self):
+        self.test_media_dir = tempfile.mkdtemp()
+        self.storage = BuildMediaFileSystemStorage(location=self.test_media_dir)
+
+    def tearDown(self):
+        shutil.rmtree(self.test_media_dir, ignore_errors=True)
+
+    def test_copy_directory(self):
+        self.assertFalse(self.storage.exists('files/test.html'))
+
+        self.storage.copy_directory(files_dir, 'files')
+        self.assertTrue(self.storage.exists('files/test.html'))
+        self.assertTrue(self.storage.exists('files/conf.py'))
+        self.assertTrue(self.storage.exists('files/api.fjson'))
+        self.assertTrue(self.storage.exists('files/api/index.html'))
+
+    def test_delete_directory(self):
+        self.storage.copy_directory(files_dir, 'files')
+        dirs, files = self.storage.listdir('files')
+        self.assertEqual(dirs, ['api'])
+        self.assertCountEqual(files, ['api.fjson', 'conf.py', 'test.html'])
+
+        self.storage.delete_directory('files/')
+        _, files = self.storage.listdir('files')
+        self.assertEqual(files, [])
+        # We don't check "dirs" here - in filesystem backed storages
+        # the empty directories are not deleted
+        # Cloud storage generally doesn't consider empty directories to exist
+
+        dirs, files = self.storage.listdir('files/api')
+        self.assertEqual(dirs, [])
+        self.assertEqual(files, [])
+
+    def test_walk(self):
+        self.storage.copy_directory(files_dir, 'files')
+
+        output = list(self.storage.walk('files'))
+        self.assertEqual(len(output), 2)
+
+        top, dirs, files = output[0]
+        self.assertEqual(top, 'files')
+        self.assertCountEqual(dirs, ['api'])
+        self.assertCountEqual(files, ['api.fjson', 'conf.py', 'test.html'])
+
+        top, dirs, files = output[1]
+        self.assertEqual(top, 'files/api')
+        self.assertCountEqual(dirs, [])
+        self.assertCountEqual(files, ['index.html'])
diff --git a/readthedocs/rtd_tests/tests/test_imported_file.py b/readthedocs/rtd_tests/tests/test_imported_file.py
index 184983b8e77..db4f815b886 100644
--- a/readthedocs/rtd_tests/tests/test_imported_file.py
+++ b/readthedocs/rtd_tests/tests/test_imported_file.py
@@ -3,6 +3,8 @@
 import os

 import mock
+from django.conf import settings
+from django.core.files.storage import get_storage_class
 from django.test import TestCase

 from readthedocs.projects.models import ImportedFile, Project, HTMLFile
@@ -20,29 +22,51 @@ class ImportedFileTests(TestCase):
     fixtures = ['eric', 'test_data']

+    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+
     def setUp(self):
         self.project = Project.objects.get(slug='pip')
         self.version = self.project.versions.first()

-    def _manage_imported_files(self, version, path, commit, build):
+        self.test_dir = os.path.join(base_dir, 'files')
+        self._copy_storage_dir()
+
+    def _manage_imported_files(self, version, commit, build):
         """Helper function for the tests to create and sync ImportedFiles."""
-        _create_imported_files(version, path, commit, build)
+        _create_imported_files(version, commit, build)
         _sync_imported_files(version, build, set())

+    def _copy_storage_dir(self):
+        """Copy the test directory (rtd_tests/files) to storage"""
+        self.storage.copy_directory(
+            self.test_dir,
+            self.project.get_storage_path(
+                type_='html',
+                version_slug=self.version.slug,
+                include_file=False,
+            ),
+        )
+
     def test_properly_created(self):
-        test_dir = os.path.join(base_dir, 'files')
+        # Only 2 files in the directory are HTML (test.html, api/index.html)
         self.assertEqual(ImportedFile.objects.count(), 0)
-        self._manage_imported_files(self.version, test_dir, 'commit01', 1)
-        self.assertEqual(ImportedFile.objects.count(), 4)
-        self._manage_imported_files(self.version, test_dir, 'commit01', 2)
+        self._manage_imported_files(self.version, 'commit01', 1)
+        self.assertEqual(ImportedFile.objects.count(), 2)
+        self._manage_imported_files(self.version, 'commit01', 2)
+        self.assertEqual(ImportedFile.objects.count(), 2)
+
+        self.project.cdn_enabled = True
+        self.project.save()
+
+        # CDN enabled projects => save all files
+        self._manage_imported_files(self.version, 'commit01', 3)
         self.assertEqual(ImportedFile.objects.count(), 4)

     def test_update_commit(self):
-        test_dir = os.path.join(base_dir, 'files')
         self.assertEqual(ImportedFile.objects.count(), 0)
-        self._manage_imported_files(self.version, test_dir, 'commit01', 1)
+        self._manage_imported_files(self.version, 'commit01', 1)
         self.assertEqual(ImportedFile.objects.first().commit, 'commit01')
-        self._manage_imported_files(self.version, test_dir, 'commit02', 2)
+        self._manage_imported_files(self.version, 'commit02', 2)
         self.assertEqual(ImportedFile.objects.first().commit, 'commit02')

     def test_update_content(self):
@@ -52,16 +76,20 @@ def test_update_content(self):
         with open(os.path.join(test_dir, 'test.html'), 'w+') as f:
             f.write('Woo')

-        self._manage_imported_files(self.version, test_dir, 'commit01', 1)
+        self._copy_storage_dir()
+
+        self._manage_imported_files(self.version, 'commit01', 1)
         self.assertEqual(ImportedFile.objects.get(name='test.html').md5, 'c7532f22a052d716f7b2310fb52ad981')
+        self.assertEqual(ImportedFile.objects.count(), 2)

         with open(os.path.join(test_dir, 'test.html'), 'w+') as f:
             f.write('Something Else')

-        self._manage_imported_files(self.version, test_dir, 'commit02', 2)
-        self.assertNotEqual(ImportedFile.objects.get(name='test.html').md5, 'c7532f22a052d716f7b2310fb52ad981')
+        self._copy_storage_dir()

-        self.assertEqual(ImportedFile.objects.count(), 4)
+        self._manage_imported_files(self.version, 'commit02', 2)
+        self.assertNotEqual(ImportedFile.objects.get(name='test.html').md5, 'c7532f22a052d716f7b2310fb52ad981')
+        self.assertEqual(ImportedFile.objects.count(), 2)

     @mock.patch('readthedocs.projects.tasks.os.path.exists')
     def test_create_intersphinx_data(self, mock_exists):
@@ -100,10 +128,8 @@ def test_create_intersphinx_data(self, mock_exists):
             return_value=test_objects_inv
         ) as mock_fetch_inventory:

-            test_dir = os.path.join(base_dir, 'files')
-
-            _create_imported_files(self.version, test_dir, 'commit01', 1)
-            _create_intersphinx_data(self.version, test_dir, 'commit01', 1)
+            _create_imported_files(self.version, 'commit01', 1)
+            _create_intersphinx_data(self.version, 'commit01', 1)

         # there will be two html files,
         # `api/index.html` and `test.html`
@@ -111,6 +137,10 @@ def test_create_intersphinx_data(self, mock_exists):
             HTMLFile.objects.all().count(), 2
         )
+        self.assertEqual(
+            HTMLFile.objects.filter(path='test.html').count(),
+            1
+        )
         self.assertEqual(
             HTMLFile.objects.filter(path='api/index.html').count(),
             1
diff --git a/readthedocs/rtd_tests/tests/test_search_json_parsing.py b/readthedocs/rtd_tests/tests/test_search_json_parsing.py
index 716dc904b41..33c001c53de 100644
--- a/readthedocs/rtd_tests/tests/test_search_json_parsing.py
+++ b/readthedocs/rtd_tests/tests/test_search_json_parsing.py
@@ -2,21 +2,20 @@
 import os

 from django.test import TestCase
+from django.test.utils import override_settings

 from readthedocs.search.parse_json import process_file

 base_dir = os.path.dirname(os.path.dirname(__file__))

+
 class TestHacks(TestCase):
+
+    @override_settings(MEDIA_ROOT=base_dir)
     def test_h2_parsing(self):
-        data = process_file(
-            os.path.join(
-                base_dir,
-                'files/api.fjson',
-            ),
-        )
+        data = process_file('files/api.fjson')
+
         self.assertEqual(data['path'], 'api')
         self.assertEqual(
             data['sections'][1]['id'],
             'a-basic-api-client-using-slumber')
diff --git a/readthedocs/search/parse_json.py b/readthedocs/search/parse_json.py
index 19c81a6ce9b..800564c5501 100644
--- a/readthedocs/search/parse_json.py
+++ b/readthedocs/search/parse_json.py
@@ -1,9 +1,11 @@
 """Functions related to converting content into dict/JSON structures."""

-import codecs
 import json
 import logging

+from django.conf import settings
+from django.core.files.storage import get_storage_class
+
 from pyquery import PyQuery
@@ -52,13 +54,21 @@ def generate_sections_from_pyquery(body):
     }


-def process_file(fjson_filename):
+def process_file(fjson_storage_path):
     """Read the fjson file from disk and parse it into a structured dict."""
+    if not settings.RTD_BUILD_MEDIA_STORAGE:
+        log.warning('RTD_BUILD_MEDIA_STORAGE is missing - Not processing file')
+        raise RuntimeError('RTD_BUILD_MEDIA_STORAGE is missing - Not processing file')
+
+    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
+
+    log.debug('Processing JSON file for indexing: %s', fjson_storage_path)
+
     try:
-        with codecs.open(fjson_filename, encoding='utf-8', mode='r') as f:
+        with storage.open(fjson_storage_path, mode='r') as f:
             file_contents = f.read()
     except IOError:
-        log.info('Unable to read file: %s', fjson_filename)
+        log.info('Unable to read file: %s', fjson_storage_path)
         raise
     data = json.loads(file_contents)
     sections = []
@@ -68,19 +78,19 @@ def process_file(fjson_storage_path):
     if 'current_page_name' in data:
         path = data['current_page_name']
     else:
-        log.info('Unable to index file due to no name %s', fjson_filename)
+        log.info('Unable to index file due to no name %s', fjson_storage_path)

     if data.get('body'):
         body = PyQuery(data['body'])
         sections.extend(generate_sections_from_pyquery(body))
     else:
-        log.info('Unable to index content for: %s', fjson_filename)
+        log.info('Unable to index content for: %s', fjson_storage_path)

     if 'title' in data:
         title = data['title']
         title = PyQuery(data['title']).text().replace('¶', '').strip()
     else:
-        log.info('Unable to index title for: %s', fjson_filename)
+        log.info('Unable to index title for: %s', fjson_storage_path)

     return {
         'path': path,
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py
index badddfcf813..a546b10e439 100644
--- a/readthedocs/settings/base.py
+++ b/readthedocs/settings/base.py
@@ -233,9 +233,9 @@ def USE_PROMOS(self):  # noqa
     ]

     PYTHON_MEDIA = False
-    # Optional Django Storage subclass used to write build artifacts to cloud or local storage
-    # https://docs.readthedocs.io/en/stable/settings.html#build-media-storage
-    RTD_BUILD_MEDIA_STORAGE = None
+    # Django Storage subclass used to write build artifacts to cloud or local storage
+    # https://docs.readthedocs.io/page/development/settings.html#rtd-build-media-storage
+    RTD_BUILD_MEDIA_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'

     TEMPLATES = [
         {
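
Usage note for the ``RTD_BUILD_MEDIA_STORAGE`` setting documented above: a minimal sketch of a custom cloud backend, assuming django-storages is installed and provides ``S3Boto3Storage``. The module path, class name, and bucket name are hypothetical illustrations, not part of this changeset::

    # Hypothetical example - not part of this diff.
    # Assumes django-storages supplies S3Boto3Storage.
    from storages.backends.s3boto3 import S3Boto3Storage

    from readthedocs.builds.storage import BuildMediaStorageMixin


    class MyBuildMediaStorage(BuildMediaStorageMixin, S3Boto3Storage):
        """S3-backed build media storage.

        The mixin supplies the copy_directory(), delete_directory(),
        join() and walk() helpers that the tasks in this diff rely on.
        """

        bucket_name = 'my-build-media'  # hypothetical bucket name


    # In a settings class:
    # RTD_BUILD_MEDIA_STORAGE = 'path.to.MyBuildMediaStorage'

Callers resolve the backend the same way the tasks above do, e.g. ``storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()``, and can then traverse uploaded artifacts with ``storage.walk()`` instead of ``os.walk()``.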