diff --git a/.travis.yml b/.travis.yml
index 6743bf289bb..802c8986cc0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,11 +1,10 @@
language: python
python:
- - 2.7
- 3.6
-env:
- - ES_VERSION=1.3.9 ES_DOWNLOAD_URL=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz
matrix:
include:
+ - python: 3.6
+ env: TOXENV=py36 ES_VERSION=1.3.9 ES_DOWNLOAD_URL=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz
- python: 3.6
env: TOXENV=docs
- python: 3.6
@@ -45,6 +44,6 @@ notifications:
branches:
only:
- - master
+ - master
- rel # Community release branch
- relcorp # Corporate release branch
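Note: the hunk above drops the Python 2.7 builder and the top-level env block, pinning ES_VERSION and ES_DOWNLOAD_URL on a single py36 matrix entry instead. A minimal, hypothetical sketch of how an install step might consume those variables (the repository's actual install script is not part of this hunk):

# Hypothetical sketch only: consuming the ES_* variables from the matrix entry above.
import os
import tarfile
import urllib.request

es_version = os.environ.get('ES_VERSION', '1.3.9')
es_url = os.environ.get(
    'ES_DOWNLOAD_URL',
    'https://download.elastic.co/elasticsearch/elasticsearch/'
    'elasticsearch-{}.tar.gz'.format(es_version),
)
archive, _ = urllib.request.urlretrieve(es_url)  # download the tarball
with tarfile.open(archive) as tar:
    tar.extractall('/tmp/elasticsearch')         # unpack for the test run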
diff --git a/common b/common
index 46aad68c905..2c428603279 160000
--- a/common
+++ b/common
@@ -1 +1 @@
-Subproject commit 46aad68c905ff843559b39cb52b5d54e586115c4
+Subproject commit 2c42860327916ec66f3aed7cf3d7bab809438ab4
diff --git a/readthedocs/__init__.py b/readthedocs/__init__.py
index bf6f944d7e6..8f8c9ee7c80 100644
--- a/readthedocs/__init__.py
+++ b/readthedocs/__init__.py
@@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
+
"""Read the Docs."""
import os.path
-from future.moves.configparser import RawConfigParser
+from configparser import RawConfigParser
def get_version(setupcfg_path):
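With the Python 2 compatibility shim gone, RawConfigParser comes straight from the standard library. The body of get_version() is outside this hunk; a hypothetical sketch of reading a version with the stdlib parser (the 'metadata'/'version' section and option names are assumptions):

# Illustrative sketch only; the real get_version() body is not shown in this diff.
from configparser import RawConfigParser

def get_version(setupcfg_path):
    """Read a version string from a setup.cfg-style file (assumed layout)."""
    cfg = RawConfigParser()
    cfg.read(setupcfg_path)
    return cfg.get('metadata', 'version')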
diff --git a/readthedocs/analytics/__init__.py b/readthedocs/analytics/__init__.py
index f2531d76edb..b25cef94087 100644
--- a/readthedocs/analytics/__init__.py
+++ b/readthedocs/analytics/__init__.py
@@ -1,3 +1,5 @@
-"""App init"""
+# -*- coding: utf-8 -*-
-default_app_config = 'readthedocs.analytics.apps.AnalyticsAppConfig' # noqa
+"""App init."""
+
+default_app_config = 'readthedocs.analytics.apps.AnalyticsAppConfig' # noqa
diff --git a/readthedocs/analytics/apps.py b/readthedocs/analytics/apps.py
index afdea7f5dec..c2abb221a18 100644
--- a/readthedocs/analytics/apps.py
+++ b/readthedocs/analytics/apps.py
@@ -1,12 +1,13 @@
+# -*- coding: utf-8 -*-
+
"""Django app config for the analytics app."""
-from __future__ import absolute_import
from django.apps import AppConfig
class AnalyticsAppConfig(AppConfig):
- """Analytics app init code"""
+ """Analytics app init code."""
name = 'readthedocs.analytics'
verbose_name = 'Analytics'
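For context, this AppConfig pairs with the default_app_config string added in the package __init__ above: listing the bare module path in INSTALLED_APPS is enough for Django (pre-3.2 convention) to load AnalyticsAppConfig. A minimal, hypothetical settings sketch:

# Hypothetical settings sketch; the project's real INSTALLED_APPS is not shown here.
INSTALLED_APPS = [
    # ... other apps ...
    'readthedocs.analytics',  # resolved to AnalyticsAppConfig via default_app_config
]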
diff --git a/readthedocs/analytics/tasks.py b/readthedocs/analytics/tasks.py
index 6c1ec2cfce1..4e2cc957ee8 100644
--- a/readthedocs/analytics/tasks.py
+++ b/readthedocs/analytics/tasks.py
@@ -1,6 +1,6 @@
-"""Tasks for Read the Docs' analytics"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Tasks for Read the Docs' analytics."""
from django.conf import settings
@@ -11,24 +11,24 @@
DEFAULT_PARAMETERS = {
- 'v': '1', # analytics version (always 1)
- 'aip': '1', # anonymize IP
+ 'v': '1', # analytics version (always 1)
+ 'aip': '1', # anonymize IP
'tid': settings.GLOBAL_ANALYTICS_CODE,
# User data
- 'uip': None, # User IP address
- 'ua': None, # User agent
+ 'uip': None, # User IP address
+ 'ua': None, # User agent
# Application info
'an': 'Read the Docs',
- 'av': readthedocs.__version__, # App version
+ 'av': readthedocs.__version__, # App version
}
@app.task(queue='web')
def analytics_pageview(url, title=None, **kwargs):
"""
- Send a pageview to Google Analytics
+ Send a pageview to Google Analytics.
:see: https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters
:param url: the URL of the pageview
@@ -37,8 +37,8 @@ def analytics_pageview(url, title=None, **kwargs):
"""
data = {
't': 'pageview',
- 'dl': url, # URL of the pageview (required)
- 'dt': title, # Title of the page
+ 'dl': url, # URL of the pageview (required)
+ 'dt': title, # Title of the page
}
data.update(DEFAULT_PARAMETERS)
data.update(kwargs)
@@ -46,9 +46,12 @@ def analytics_pageview(url, title=None, **kwargs):
@app.task(queue='web')
-def analytics_event(event_category, event_action, event_label=None, event_value=None, **kwargs):
+def analytics_event(
+ event_category, event_action, event_label=None, event_value=None,
+ **kwargs
+):
"""
- Send an analytics event to Google Analytics
+ Send an analytics event to Google Analytics.
:see: https://developers.google.com/analytics/devguides/collection/protocol/v1/devguide#event
:param event_category: the category of the event
@@ -58,11 +61,11 @@ def analytics_event(event_category, event_action, event_label=None, event_value=
:param kwargs: extra event parameters to send to GA
"""
data = {
- 't': 'event', # GA event - don't change
- 'ec': event_category, # Event category (required)
- 'ea': event_action, # Event action (required)
- 'el': event_label, # Event label
- 'ev': event_value, # Event value (numeric)
+ 't': 'event', # GA event - don't change
+ 'ec': event_category, # Event category (required)
+ 'ea': event_action, # Event action (required)
+ 'el': event_label, # Event label
+ 'ev': event_value, # Event value (numeric)
}
data.update(DEFAULT_PARAMETERS)
data.update(kwargs)
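The dicts assembled above are Google Analytics Measurement Protocol payloads; send_to_analytics() in utils.py, further down in this diff, POSTs them to the collect endpoint. A rough, self-contained sketch of the equivalent pageview request, with a placeholder property ID standing in for settings.GLOBAL_ANALYTICS_CODE:

# Rough sketch of the eventual HTTP request for a pageview; the property ID is a
# placeholder, and real payloads also carry uip/ua/cid as shown above.
import requests

payload = {
    'v': '1',               # Measurement Protocol version
    'aip': '1',             # anonymize IP
    'tid': 'UA-000000-1',   # placeholder for settings.GLOBAL_ANALYTICS_CODE
    't': 'pageview',
    'dl': 'https://docs.example.com/en/latest/',  # URL of the pageview
    'dt': 'Example page title',
}
requests.post(
    'https://www.google-analytics.com/collect',
    data=payload,
    timeout=3,  # seconds, matching send_to_analytics()
)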
diff --git a/readthedocs/analytics/tests.py b/readthedocs/analytics/tests.py
index 37b26957033..d3507d8642e 100644
--- a/readthedocs/analytics/tests.py
+++ b/readthedocs/analytics/tests.py
@@ -1,5 +1,4 @@
-from __future__ import absolute_import, unicode_literals
-
+# -*- coding: utf-8 -*-
from django.test import TestCase
from .utils import anonymize_ip_address, anonymize_user_agent
@@ -29,4 +28,3 @@ def test_anonymize_ua(self):
anonymize_user_agent('Some rare user agent'),
'Rare user agent',
)
-
diff --git a/readthedocs/analytics/utils.py b/readthedocs/analytics/utils.py
index 44eef551125..c358423e499 100644
--- a/readthedocs/analytics/utils.py
+++ b/readthedocs/analytics/utils.py
@@ -1,26 +1,23 @@
-"""Utilities related to analytics"""
+# -*- coding: utf-8 -*-
+
+"""Utilities related to analytics."""
-from __future__ import absolute_import, unicode_literals
import hashlib
+import ipaddress
import logging
+import requests
from django.conf import settings
-from django.utils.encoding import force_text, force_bytes
from django.utils.crypto import get_random_string
-import requests
+from django.utils.encoding import force_bytes, force_text
from user_agents import parse
-try:
- # Python 3.3+ only
- import ipaddress
-except ImportError:
- from .vendor import ipaddress
-log = logging.getLogger(__name__) # noqa
+log = logging.getLogger(__name__) # noqa
def get_client_ip(request):
- """Gets the real IP based on a request object"""
+ """Gets the real IP based on a request object."""
ip_address = request.META.get('REMOTE_ADDR')
# Get the original IP address (eg. "X-Forwarded-For: client, proxy1, proxy2")
@@ -32,7 +29,7 @@ def get_client_ip(request):
def anonymize_ip_address(ip_address):
- """Anonymizes an IP address by zeroing the last 2 bytes"""
+ """Anonymizes an IP address by zeroing the last 2 bytes."""
# Used to anonymize an IP by zero-ing out the last 2 bytes
ip_mask = int('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF0000', 16)
@@ -46,7 +43,7 @@ def anonymize_ip_address(ip_address):
def anonymize_user_agent(user_agent):
- """Anonymizes rare user agents"""
+ """Anonymizes rare user agents."""
# If the browser family is not recognized, this is a rare user agent
parsed_ua = parse(user_agent)
if parsed_ua.browser.family == 'Other' or parsed_ua.os.family == 'Other':
@@ -56,7 +53,7 @@ def anonymize_user_agent(user_agent):
def send_to_analytics(data):
- """Sends data to Google Analytics"""
+ """Sends data to Google Analytics."""
if data.get('uip') and data.get('ua'):
data['cid'] = generate_client_id(data['uip'], data['ua'])
@@ -74,7 +71,7 @@ def send_to_analytics(data):
resp = requests.post(
'https://www.google-analytics.com/collect',
data=data,
- timeout=3, # seconds
+ timeout=3, # seconds
)
except requests.Timeout:
log.warning('Timeout sending to Google Analytics')
@@ -85,10 +82,10 @@ def send_to_analytics(data):
def generate_client_id(ip_address, user_agent):
"""
- Create an advertising ID
+ Create an advertising ID.
- This simplifies things but essentially if a user has the same IP and same UA,
- this will treat them as the same user for analytics purposes
+    This is a simplification: if a user has the same IP and the same UA,
+    they are treated as the same user for analytics purposes.
"""
salt = b'advertising-client-id'
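With the vendored copy deleted below, the module relies on the stdlib ipaddress package (Python 3.3+) imported at the top of this file. A standalone sketch of the masking idea behind anonymize_ip_address(), zeroing the last two bytes with the same mask constant (the real function's string conversion and error handling are outside this hunk):

# Standalone sketch using the stdlib ipaddress module; mirrors the mask above,
# not necessarily the exact behaviour of anonymize_ip_address().
import ipaddress

IP_MASK = int('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF0000', 16)  # zero the last 2 bytes

def mask_ip(ip_str):
    ip = ipaddress.ip_address(ip_str)
    return str(ipaddress.ip_address(int(ip) & IP_MASK))

print(mask_ip('203.0.113.195'))   # -> 203.0.0.0
print(mask_ip('2001:db8::abcd'))  # -> 2001:db8::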
diff --git a/readthedocs/analytics/vendor/__init__.py b/readthedocs/analytics/vendor/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/readthedocs/analytics/vendor/ipaddress.py b/readthedocs/analytics/vendor/ipaddress.py
deleted file mode 100644
index b81d477bf96..00000000000
--- a/readthedocs/analytics/vendor/ipaddress.py
+++ /dev/null
@@ -1,2420 +0,0 @@
-# flake8: noqa
-# Copyright 2007 Google Inc.
-# Licensed to PSF under a Contributor Agreement.
-
-"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
-
-This library is used to create/poke/manipulate IPv4 and IPv6 addresses
-and networks.
-
-"""
-
-from __future__ import unicode_literals
-
-
-import itertools
-import struct
-
-__version__ = '1.0.22'
-
-# Compatibility functions
-_compat_int_types = (int,)
-try:
- _compat_int_types = (int, long)
-except NameError:
- pass
-try:
- _compat_str = unicode
-except NameError:
- _compat_str = str
- assert bytes != str
-if b'\0'[0] == 0: # Python 3 semantics
- def _compat_bytes_to_byte_vals(byt):
- return byt
-else:
- def _compat_bytes_to_byte_vals(byt):
- return [struct.unpack(b'!B', b)[0] for b in byt]
-try:
- _compat_int_from_byte_vals = int.from_bytes
-except AttributeError:
- def _compat_int_from_byte_vals(bytvals, endianess):
- assert endianess == 'big'
- res = 0
- for bv in bytvals:
- assert isinstance(bv, _compat_int_types)
- res = (res << 8) + bv
- return res
-
-
-def _compat_to_bytes(intval, length, endianess):
- assert isinstance(intval, _compat_int_types)
- assert endianess == 'big'
- if length == 4:
- if intval < 0 or intval >= 2 ** 32:
- raise struct.error("integer out of range for 'I' format code")
- return struct.pack(b'!I', intval)
- elif length == 16:
- if intval < 0 or intval >= 2 ** 128:
- raise struct.error("integer out of range for 'QQ' format code")
- return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
- else:
- raise NotImplementedError()
-
-
-if hasattr(int, 'bit_length'):
- # Not int.bit_length , since that won't work in 2.7 where long exists
- def _compat_bit_length(i):
- return i.bit_length()
-else:
- def _compat_bit_length(i):
- for res in itertools.count():
- if i >> res == 0:
- return res
-
-
-def _compat_range(start, end, step=1):
- assert step > 0
- i = start
- while i < end:
- yield i
- i += step
-
-
-class _TotalOrderingMixin(object):
- __slots__ = ()
-
- # Helper that derives the other comparison operations from
- # __lt__ and __eq__
- # We avoid functools.total_ordering because it doesn't handle
- # NotImplemented correctly yet (http://bugs.python.org/issue10042)
- def __eq__(self, other):
- raise NotImplementedError
-
- def __ne__(self, other):
- equal = self.__eq__(other)
- if equal is NotImplemented:
- return NotImplemented
- return not equal
-
- def __lt__(self, other):
- raise NotImplementedError
-
- def __le__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented or not less:
- return self.__eq__(other)
- return less
-
- def __gt__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented:
- return NotImplemented
- equal = self.__eq__(other)
- if equal is NotImplemented:
- return NotImplemented
- return not (less or equal)
-
- def __ge__(self, other):
- less = self.__lt__(other)
- if less is NotImplemented:
- return NotImplemented
- return not less
-
-
-IPV4LENGTH = 32
-IPV6LENGTH = 128
-
-
-class AddressValueError(ValueError):
- """A Value Error related to the address."""
-
-
-class NetmaskValueError(ValueError):
- """A Value Error related to the netmask."""
-
-
-def ip_address(address):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP address. Either IPv4 or
- IPv6 addresses may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Address or IPv6Address object.
-
- Raises:
- ValueError: if the *address* passed isn't either a v4 or a v6
- address
-
- """
- try:
- return IPv4Address(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Address(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- if isinstance(address, bytes):
- raise AddressValueError(
- '%r does not appear to be an IPv4 or IPv6 address. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?' % address)
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
- address)
-
-
-def ip_network(address, strict=True):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP network. Either IPv4 or
- IPv6 networks may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Network or IPv6Network object.
-
- Raises:
- ValueError: if the string passed isn't either a v4 or a v6
- address. Or if the network has host bits set.
-
- """
- try:
- return IPv4Network(address, strict)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Network(address, strict)
- except (AddressValueError, NetmaskValueError):
- pass
-
- if isinstance(address, bytes):
- raise AddressValueError(
- '%r does not appear to be an IPv4 or IPv6 network. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?' % address)
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
- address)
-
-
-def ip_interface(address):
- """Take an IP string/int and return an object of the correct type.
-
- Args:
- address: A string or integer, the IP address. Either IPv4 or
- IPv6 addresses may be supplied; integers less than 2**32 will
- be considered to be IPv4 by default.
-
- Returns:
- An IPv4Interface or IPv6Interface object.
-
- Raises:
- ValueError: if the string passed isn't either a v4 or a v6
- address.
-
- Notes:
- The IPv?Interface classes describe an Address on a particular
- Network, so they're basically a combination of both the Address
- and Network classes.
-
- """
- try:
- return IPv4Interface(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- try:
- return IPv6Interface(address)
- except (AddressValueError, NetmaskValueError):
- pass
-
- raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
- address)
-
-
-def v4_int_to_packed(address):
- """Represent an address as 4 packed bytes in network (big-endian) order.
-
- Args:
- address: An integer representation of an IPv4 IP address.
-
- Returns:
- The integer address packed as 4 bytes in network (big-endian) order.
-
- Raises:
- ValueError: If the integer is negative or too large to be an
- IPv4 IP address.
-
- """
- try:
- return _compat_to_bytes(address, 4, 'big')
- except (struct.error, OverflowError):
- raise ValueError("Address negative or too large for IPv4")
-
-
-def v6_int_to_packed(address):
- """Represent an address as 16 packed bytes in network (big-endian) order.
-
- Args:
- address: An integer representation of an IPv6 IP address.
-
- Returns:
- The integer address packed as 16 bytes in network (big-endian) order.
-
- """
- try:
- return _compat_to_bytes(address, 16, 'big')
- except (struct.error, OverflowError):
- raise ValueError("Address negative or too large for IPv6")
-
-
-def _split_optional_netmask(address):
- """Helper to split the netmask and raise AddressValueError if needed"""
- addr = _compat_str(address).split('/')
- if len(addr) > 2:
- raise AddressValueError("Only one '/' permitted in %r" % address)
- return addr
-
-
-def _find_address_range(addresses):
- """Find a sequence of sorted deduplicated IPv#Address.
-
- Args:
- addresses: a list of IPv#Address objects.
-
- Yields:
- A tuple containing the first and last IP addresses in the sequence.
-
- """
- it = iter(addresses)
- first = last = next(it)
- for ip in it:
- if ip._ip != last._ip + 1:
- yield first, last
- first = ip
- last = ip
- yield first, last
-
-
-def _count_righthand_zero_bits(number, bits):
- """Count the number of zero bits on the right hand side.
-
- Args:
- number: an integer.
- bits: maximum number of bits to count.
-
- Returns:
- The number of zero bits on the right hand side of the number.
-
- """
- if number == 0:
- return bits
- return min(bits, _compat_bit_length(~number & (number - 1)))
-
-
-def summarize_address_range(first, last):
- """Summarize a network range given the first and last IP addresses.
-
- Example:
- >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
- ... IPv4Address('192.0.2.130')))
- ... #doctest: +NORMALIZE_WHITESPACE
- [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
- IPv4Network('192.0.2.130/32')]
-
- Args:
- first: the first IPv4Address or IPv6Address in the range.
- last: the last IPv4Address or IPv6Address in the range.
-
- Returns:
- An iterator of the summarized IPv(4|6) network objects.
-
- Raise:
- TypeError:
- If the first and last objects are not IP addresses.
- If the first and last objects are not the same version.
- ValueError:
- If the last object is not greater than the first.
- If the version of the first address is not 4 or 6.
-
- """
- if (not (isinstance(first, _BaseAddress) and
- isinstance(last, _BaseAddress))):
- raise TypeError('first and last must be IP addresses, not networks')
- if first.version != last.version:
- raise TypeError("%s and %s are not of the same version" % (
- first, last))
- if first > last:
- raise ValueError('last IP address must be greater than first')
-
- if first.version == 4:
- ip = IPv4Network
- elif first.version == 6:
- ip = IPv6Network
- else:
- raise ValueError('unknown IP version')
-
- ip_bits = first._max_prefixlen
- first_int = first._ip
- last_int = last._ip
- while first_int <= last_int:
- nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
- _compat_bit_length(last_int - first_int + 1) - 1)
- net = ip((first_int, ip_bits - nbits))
- yield net
- first_int += 1 << nbits
- if first_int - 1 == ip._ALL_ONES:
- break
-
-
-def _collapse_addresses_internal(addresses):
- """Loops through the addresses, collapsing concurrent netblocks.
-
- Example:
-
- ip1 = IPv4Network('192.0.2.0/26')
- ip2 = IPv4Network('192.0.2.64/26')
- ip3 = IPv4Network('192.0.2.128/26')
- ip4 = IPv4Network('192.0.2.192/26')
-
- _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
- [IPv4Network('192.0.2.0/24')]
-
- This shouldn't be called directly; it is called via
- collapse_addresses([]).
-
- Args:
- addresses: A list of IPv4Network's or IPv6Network's
-
- Returns:
- A list of IPv4Network's or IPv6Network's depending on what we were
- passed.
-
- """
- # First merge
- to_merge = list(addresses)
- subnets = {}
- while to_merge:
- net = to_merge.pop()
- supernet = net.supernet()
- existing = subnets.get(supernet)
- if existing is None:
- subnets[supernet] = net
- elif existing != net:
- # Merge consecutive subnets
- del subnets[supernet]
- to_merge.append(supernet)
- # Then iterate over resulting networks, skipping subsumed subnets
- last = None
- for net in sorted(subnets.values()):
- if last is not None:
- # Since they are sorted,
- # last.network_address <= net.network_address is a given.
- if last.broadcast_address >= net.broadcast_address:
- continue
- yield net
- last = net
-
-
-def collapse_addresses(addresses):
- """Collapse a list of IP objects.
-
- Example:
- collapse_addresses([IPv4Network('192.0.2.0/25'),
- IPv4Network('192.0.2.128/25')]) ->
- [IPv4Network('192.0.2.0/24')]
-
- Args:
- addresses: An iterator of IPv4Network or IPv6Network objects.
-
- Returns:
- An iterator of the collapsed IPv(4|6)Network objects.
-
- Raises:
- TypeError: If passed a list of mixed version objects.
-
- """
- addrs = []
- ips = []
- nets = []
-
- # split IP addresses and networks
- for ip in addresses:
- if isinstance(ip, _BaseAddress):
- if ips and ips[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, ips[-1]))
- ips.append(ip)
- elif ip._prefixlen == ip._max_prefixlen:
- if ips and ips[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, ips[-1]))
- try:
- ips.append(ip.ip)
- except AttributeError:
- ips.append(ip.network_address)
- else:
- if nets and nets[-1]._version != ip._version:
- raise TypeError("%s and %s are not of the same version" % (
- ip, nets[-1]))
- nets.append(ip)
-
- # sort and dedup
- ips = sorted(set(ips))
-
- # find consecutive address ranges in the sorted sequence and summarize them
- if ips:
- for first, last in _find_address_range(ips):
- addrs.extend(summarize_address_range(first, last))
-
- return _collapse_addresses_internal(addrs + nets)
-
-
-def get_mixed_type_key(obj):
- """Return a key suitable for sorting between networks and addresses.
-
- Address and Network objects are not sortable by default; they're
- fundamentally different so the expression
-
- IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
-
- doesn't make any sense. There are some times however, where you may wish
- to have ipaddress sort these for you anyway. If you need to do this, you
- can use this function as the key= argument to sorted().
-
- Args:
- obj: either a Network or Address object.
- Returns:
- appropriate key.
-
- """
- if isinstance(obj, _BaseNetwork):
- return obj._get_networks_key()
- elif isinstance(obj, _BaseAddress):
- return obj._get_address_key()
- return NotImplemented
-
-
-class _IPAddressBase(_TotalOrderingMixin):
-
- """The mother class."""
-
- __slots__ = ()
-
- @property
- def exploded(self):
- """Return the longhand version of the IP address as a string."""
- return self._explode_shorthand_ip_string()
-
- @property
- def compressed(self):
- """Return the shorthand version of the IP address as a string."""
- return _compat_str(self)
-
- @property
- def reverse_pointer(self):
- """The name of the reverse DNS pointer for the IP address, e.g.:
- >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
- '1.0.0.127.in-addr.arpa'
- >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
- '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
-
- """
- return self._reverse_pointer()
-
- @property
- def version(self):
- msg = '%200s has no version specified' % (type(self),)
- raise NotImplementedError(msg)
-
- def _check_int_address(self, address):
- if address < 0:
- msg = "%d (< 0) is not permitted as an IPv%d address"
- raise AddressValueError(msg % (address, self._version))
- if address > self._ALL_ONES:
- msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
- raise AddressValueError(msg % (address, self._max_prefixlen,
- self._version))
-
- def _check_packed_address(self, address, expected_len):
- address_len = len(address)
- if address_len != expected_len:
- msg = (
- '%r (len %d != %d) is not permitted as an IPv%d address. '
- 'Did you pass in a bytes (str in Python 2) instead of'
- ' a unicode object?')
- raise AddressValueError(msg % (address, address_len,
- expected_len, self._version))
-
- @classmethod
- def _ip_int_from_prefix(cls, prefixlen):
- """Turn the prefix length into a bitwise netmask
-
- Args:
- prefixlen: An integer, the prefix length.
-
- Returns:
- An integer.
-
- """
- return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
-
- @classmethod
- def _prefix_from_ip_int(cls, ip_int):
- """Return prefix length from the bitwise netmask.
-
- Args:
- ip_int: An integer, the netmask in expanded bitwise format
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- ValueError: If the input intermingles zeroes & ones
- """
- trailing_zeroes = _count_righthand_zero_bits(ip_int,
- cls._max_prefixlen)
- prefixlen = cls._max_prefixlen - trailing_zeroes
- leading_ones = ip_int >> trailing_zeroes
- all_ones = (1 << prefixlen) - 1
- if leading_ones != all_ones:
- byteslen = cls._max_prefixlen // 8
- details = _compat_to_bytes(ip_int, byteslen, 'big')
- msg = 'Netmask pattern %r mixes zeroes & ones'
- raise ValueError(msg % details)
- return prefixlen
-
- @classmethod
- def _report_invalid_netmask(cls, netmask_str):
- msg = '%r is not a valid netmask' % netmask_str
- raise NetmaskValueError(msg)
-
- @classmethod
- def _prefix_from_prefix_string(cls, prefixlen_str):
- """Return prefix length from a numeric string
-
- Args:
- prefixlen_str: The string to be converted
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- NetmaskValueError: If the input is not a valid netmask
- """
- # int allows a leading +/- as well as surrounding whitespace,
- # so we ensure that isn't the case
- if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
- cls._report_invalid_netmask(prefixlen_str)
- try:
- prefixlen = int(prefixlen_str)
- except ValueError:
- cls._report_invalid_netmask(prefixlen_str)
- if not (0 <= prefixlen <= cls._max_prefixlen):
- cls._report_invalid_netmask(prefixlen_str)
- return prefixlen
-
- @classmethod
- def _prefix_from_ip_string(cls, ip_str):
- """Turn a netmask/hostmask string into a prefix length
-
- Args:
- ip_str: The netmask/hostmask to be converted
-
- Returns:
- An integer, the prefix length.
-
- Raises:
- NetmaskValueError: If the input is not a valid netmask/hostmask
- """
- # Parse the netmask/hostmask like an IP address.
- try:
- ip_int = cls._ip_int_from_string(ip_str)
- except AddressValueError:
- cls._report_invalid_netmask(ip_str)
-
- # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
- # Note that the two ambiguous cases (all-ones and all-zeroes) are
- # treated as netmasks.
- try:
- return cls._prefix_from_ip_int(ip_int)
- except ValueError:
- pass
-
- # Invert the bits, and try matching a /0+1+/ hostmask instead.
- ip_int ^= cls._ALL_ONES
- try:
- return cls._prefix_from_ip_int(ip_int)
- except ValueError:
- cls._report_invalid_netmask(ip_str)
-
- def __reduce__(self):
- return self.__class__, (_compat_str(self),)
-
-
-class _BaseAddress(_IPAddressBase):
-
- """A generic IP object.
-
- This IP class contains the version independent methods which are
- used by single IP addresses.
- """
-
- __slots__ = ()
-
- def __int__(self):
- return self._ip
-
- def __eq__(self, other):
- try:
- return (self._ip == other._ip and
- self._version == other._version)
- except AttributeError:
- return NotImplemented
-
- def __lt__(self, other):
- if not isinstance(other, _IPAddressBase):
- return NotImplemented
- if not isinstance(other, _BaseAddress):
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same version' % (
- self, other))
- if self._ip != other._ip:
- return self._ip < other._ip
- return False
-
- # Shorthand for Integer addition and subtraction. This is not
- # meant to ever support addition/subtraction of addresses.
- def __add__(self, other):
- if not isinstance(other, _compat_int_types):
- return NotImplemented
- return self.__class__(int(self) + other)
-
- def __sub__(self, other):
- if not isinstance(other, _compat_int_types):
- return NotImplemented
- return self.__class__(int(self) - other)
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
-
- def __str__(self):
- return _compat_str(self._string_from_ip_int(self._ip))
-
- def __hash__(self):
- return hash(hex(int(self._ip)))
-
- def _get_address_key(self):
- return (self._version, self)
-
- def __reduce__(self):
- return self.__class__, (self._ip,)
-
-
-class _BaseNetwork(_IPAddressBase):
-
- """A generic IP network object.
-
- This IP class contains the version independent methods which are
- used by networks.
-
- """
- def __init__(self, address):
- self._cache = {}
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
-
- def __str__(self):
- return '%s/%d' % (self.network_address, self.prefixlen)
-
- def hosts(self):
- """Generate Iterator over usable hosts in a network.
-
- This is like __iter__ except it doesn't return the network
- or broadcast addresses.
-
- """
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network + 1, broadcast):
- yield self._address_class(x)
-
- def __iter__(self):
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network, broadcast + 1):
- yield self._address_class(x)
-
- def __getitem__(self, n):
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- if n >= 0:
- if network + n > broadcast:
- raise IndexError('address out of range')
- return self._address_class(network + n)
- else:
- n += 1
- if broadcast + n < network:
- raise IndexError('address out of range')
- return self._address_class(broadcast + n)
-
- def __lt__(self, other):
- if not isinstance(other, _IPAddressBase):
- return NotImplemented
- if not isinstance(other, _BaseNetwork):
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same version' % (
- self, other))
- if self.network_address != other.network_address:
- return self.network_address < other.network_address
- if self.netmask != other.netmask:
- return self.netmask < other.netmask
- return False
-
- def __eq__(self, other):
- try:
- return (self._version == other._version and
- self.network_address == other.network_address and
- int(self.netmask) == int(other.netmask))
- except AttributeError:
- return NotImplemented
-
- def __hash__(self):
- return hash(int(self.network_address) ^ int(self.netmask))
-
- def __contains__(self, other):
- # always false if one is v4 and the other is v6.
- if self._version != other._version:
- return False
- # dealing with another network.
- if isinstance(other, _BaseNetwork):
- return False
- # dealing with another address
- else:
- # address
- return (int(self.network_address) <= int(other._ip) <=
- int(self.broadcast_address))
-
- def overlaps(self, other):
- """Tell if self is partly contained in other."""
- return self.network_address in other or (
- self.broadcast_address in other or (
- other.network_address in self or (
- other.broadcast_address in self)))
-
- @property
- def broadcast_address(self):
- x = self._cache.get('broadcast_address')
- if x is None:
- x = self._address_class(int(self.network_address) |
- int(self.hostmask))
- self._cache['broadcast_address'] = x
- return x
-
- @property
- def hostmask(self):
- x = self._cache.get('hostmask')
- if x is None:
- x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
- self._cache['hostmask'] = x
- return x
-
- @property
- def with_prefixlen(self):
- return '%s/%d' % (self.network_address, self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self.network_address, self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self.network_address, self.hostmask)
-
- @property
- def num_addresses(self):
- """Number of hosts in the current subnet."""
- return int(self.broadcast_address) - int(self.network_address) + 1
-
- @property
- def _address_class(self):
- # Returning bare address objects (rather than interfaces) allows for
- # more consistent behaviour across the network address, broadcast
- # address and individual host addresses.
- msg = '%200s has no associated address class' % (type(self),)
- raise NotImplementedError(msg)
-
- @property
- def prefixlen(self):
- return self._prefixlen
-
- def address_exclude(self, other):
- """Remove an address from a larger block.
-
- For example:
-
- addr1 = ip_network('192.0.2.0/28')
- addr2 = ip_network('192.0.2.1/32')
- list(addr1.address_exclude(addr2)) =
- [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
- IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
-
- or IPv6:
-
- addr1 = ip_network('2001:db8::1/32')
- addr2 = ip_network('2001:db8::1/128')
- list(addr1.address_exclude(addr2)) =
- [ip_network('2001:db8::1/128'),
- ip_network('2001:db8::2/127'),
- ip_network('2001:db8::4/126'),
- ip_network('2001:db8::8/125'),
- ...
- ip_network('2001:db8:8000::/33')]
-
- Args:
- other: An IPv4Network or IPv6Network object of the same type.
-
- Returns:
- An iterator of the IPv(4|6)Network objects which is self
- minus other.
-
- Raises:
- TypeError: If self and other are of differing address
- versions, or if other is not a network object.
- ValueError: If other is not completely contained by self.
-
- """
- if not self._version == other._version:
- raise TypeError("%s and %s are not of the same version" % (
- self, other))
-
- if not isinstance(other, _BaseNetwork):
- raise TypeError("%s is not a network object" % other)
-
- if not other.subnet_of(self):
- raise ValueError('%s not contained in %s' % (other, self))
- if other == self:
- return
-
- # Make sure we're comparing the network of other.
- other = other.__class__('%s/%s' % (other.network_address,
- other.prefixlen))
-
- s1, s2 = self.subnets()
- while s1 != other and s2 != other:
- if other.subnet_of(s1):
- yield s2
- s1, s2 = s1.subnets()
- elif other.subnet_of(s2):
- yield s1
- s1, s2 = s2.subnets()
- else:
- # If we got here, there's a bug somewhere.
- raise AssertionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (s1, s2, other))
- if s1 == other:
- yield s2
- elif s2 == other:
- yield s1
- else:
- # If we got here, there's a bug somewhere.
- raise AssertionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (s1, s2, other))
-
- def compare_networks(self, other):
- """Compare two IP objects.
-
- This is only concerned about the comparison of the integer
- representation of the network addresses. This means that the
- host bits aren't considered at all in this method. If you want
- to compare host bits, you can easily enough do a
- 'HostA._ip < HostB._ip'
-
- Args:
- other: An IP object.
-
- Returns:
- If the IP versions of self and other are the same, returns:
-
- -1 if self < other:
- eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
- IPv6Network('2001:db8::1000/124') <
- IPv6Network('2001:db8::2000/124')
- 0 if self == other
- eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
- IPv6Network('2001:db8::1000/124') ==
- IPv6Network('2001:db8::1000/124')
- 1 if self > other
- eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
- IPv6Network('2001:db8::2000/124') >
- IPv6Network('2001:db8::1000/124')
-
- Raises:
- TypeError if the IP versions are different.
-
- """
- # does this need to raise a ValueError?
- if self._version != other._version:
- raise TypeError('%s and %s are not of the same type' % (
- self, other))
- # self._version == other._version below here:
- if self.network_address < other.network_address:
- return -1
- if self.network_address > other.network_address:
- return 1
- # self.network_address == other.network_address below here:
- if self.netmask < other.netmask:
- return -1
- if self.netmask > other.netmask:
- return 1
- return 0
-
- def _get_networks_key(self):
- """Network-only key function.
-
- Returns an object that identifies this address' network and
- netmask. This function is a suitable "key" argument for sorted()
- and list.sort().
-
- """
- return (self._version, self.network_address, self.netmask)
-
- def subnets(self, prefixlen_diff=1, new_prefix=None):
- """The subnets which join to make the current subnet.
-
- In the case that self contains only one IP
- (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
- for IPv6), yield an iterator with just ourself.
-
- Args:
- prefixlen_diff: An integer, the amount the prefix length
- should be increased by. This should not be set if
- new_prefix is also set.
- new_prefix: The desired new prefix length. This must be a
- larger number (smaller prefix) than the existing prefix.
- This should not be set if prefixlen_diff is also set.
-
- Returns:
- An iterator of IPv(4|6) objects.
-
- Raises:
- ValueError: The prefixlen_diff is too small or too large.
- OR
- prefixlen_diff and new_prefix are both set or new_prefix
- is a smaller number than the current prefix (smaller
- number means a larger network)
-
- """
- if self._prefixlen == self._max_prefixlen:
- yield self
- return
-
- if new_prefix is not None:
- if new_prefix < self._prefixlen:
- raise ValueError('new prefix must be longer')
- if prefixlen_diff != 1:
- raise ValueError('cannot set prefixlen_diff and new_prefix')
- prefixlen_diff = new_prefix - self._prefixlen
-
- if prefixlen_diff < 0:
- raise ValueError('prefix length diff must be > 0')
- new_prefixlen = self._prefixlen + prefixlen_diff
-
- if new_prefixlen > self._max_prefixlen:
- raise ValueError(
- 'prefix length diff %d is invalid for netblock %s' % (
- new_prefixlen, self))
-
- start = int(self.network_address)
- end = int(self.broadcast_address) + 1
- step = (int(self.hostmask) + 1) >> prefixlen_diff
- for new_addr in _compat_range(start, end, step):
- current = self.__class__((new_addr, new_prefixlen))
- yield current
-
- def supernet(self, prefixlen_diff=1, new_prefix=None):
- """The supernet containing the current network.
-
- Args:
- prefixlen_diff: An integer, the amount the prefix length of
- the network should be decreased by. For example, given a
- /24 network and a prefixlen_diff of 3, a supernet with a
- /21 netmask is returned.
-
- Returns:
- An IPv4 network object.
-
- Raises:
- ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
- a negative prefix length.
- OR
- If prefixlen_diff and new_prefix are both set or new_prefix is a
- larger number than the current prefix (larger number means a
- smaller network)
-
- """
- if self._prefixlen == 0:
- return self
-
- if new_prefix is not None:
- if new_prefix > self._prefixlen:
- raise ValueError('new prefix must be shorter')
- if prefixlen_diff != 1:
- raise ValueError('cannot set prefixlen_diff and new_prefix')
- prefixlen_diff = self._prefixlen - new_prefix
-
- new_prefixlen = self.prefixlen - prefixlen_diff
- if new_prefixlen < 0:
- raise ValueError(
- 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
- (self.prefixlen, prefixlen_diff))
- return self.__class__((
- int(self.network_address) & (int(self.netmask) << prefixlen_diff),
- new_prefixlen))
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is a multicast address.
- See RFC 2373 2.7 for details.
-
- """
- return (self.network_address.is_multicast and
- self.broadcast_address.is_multicast)
-
- @staticmethod
- def _is_subnet_of(a, b):
- try:
- # Always false if one is v4 and the other is v6.
- if a._version != b._version:
- raise TypeError("%s and %s are not of the same version" (a, b))
- return (b.network_address <= a.network_address and
- b.broadcast_address >= a.broadcast_address)
- except AttributeError:
- raise TypeError("Unable to test subnet containment "
- "between %s and %s" % (a, b))
-
- def subnet_of(self, other):
- """Return True if this network is a subnet of other."""
- return self._is_subnet_of(self, other)
-
- def supernet_of(self, other):
- """Return True if this network is a supernet of other."""
- return self._is_subnet_of(other, self)
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within one of the
- reserved IPv6 Network ranges.
-
- """
- return (self.network_address.is_reserved and
- self.broadcast_address.is_reserved)
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is reserved per RFC 4291.
-
- """
- return (self.network_address.is_link_local and
- self.broadcast_address.is_link_local)
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv4-special-registry or iana-ipv6-special-registry.
-
- """
- return (self.network_address.is_private and
- self.broadcast_address.is_private)
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, True if the address is not reserved per
- iana-ipv4-special-registry or iana-ipv6-special-registry.
-
- """
- return not self.is_private
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 2373 2.5.2.
-
- """
- return (self.network_address.is_unspecified and
- self.broadcast_address.is_unspecified)
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback address as defined in
- RFC 2373 2.5.3.
-
- """
- return (self.network_address.is_loopback and
- self.broadcast_address.is_loopback)
-
-
-class _BaseV4(object):
-
- """Base IPv4 object.
-
- The following methods are used by IPv4 objects in both single IP
- addresses and networks.
-
- """
-
- __slots__ = ()
- _version = 4
- # Equivalent to 255.255.255.255 or 32 bits of 1's.
- _ALL_ONES = (2 ** IPV4LENGTH) - 1
- _DECIMAL_DIGITS = frozenset('0123456789')
-
- # the valid octets for host and netmasks. only useful for IPv4.
- _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
-
- _max_prefixlen = IPV4LENGTH
- # There are only a handful of valid v4 netmasks, so we cache them all
- # when constructed (see _make_netmask()).
- _netmask_cache = {}
-
- def _explode_shorthand_ip_string(self):
- return _compat_str(self)
-
- @classmethod
- def _make_netmask(cls, arg):
- """Make a (netmask, prefix_len) tuple from the given argument.
-
- Argument can be:
- - an integer (the prefix length)
- - a string representing the prefix length (e.g. "24")
- - a string representing the prefix netmask (e.g. "255.255.255.0")
- """
- if arg not in cls._netmask_cache:
- if isinstance(arg, _compat_int_types):
- prefixlen = arg
- else:
- try:
- # Check for a netmask in prefix length form
- prefixlen = cls._prefix_from_prefix_string(arg)
- except NetmaskValueError:
- # Check for a netmask or hostmask in dotted-quad form.
- # This may raise NetmaskValueError.
- prefixlen = cls._prefix_from_ip_string(arg)
- netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
- cls._netmask_cache[arg] = netmask, prefixlen
- return cls._netmask_cache[arg]
-
- @classmethod
- def _ip_int_from_string(cls, ip_str):
- """Turn the given IP string into an integer for comparison.
-
- Args:
- ip_str: A string, the IP ip_str.
-
- Returns:
- The IP ip_str as an integer.
-
- Raises:
- AddressValueError: if ip_str isn't a valid IPv4 Address.
-
- """
- if not ip_str:
- raise AddressValueError('Address cannot be empty')
-
- octets = ip_str.split('.')
- if len(octets) != 4:
- raise AddressValueError("Expected 4 octets in %r" % ip_str)
-
- try:
- return _compat_int_from_byte_vals(
- map(cls._parse_octet, octets), 'big')
- except ValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
-
- @classmethod
- def _parse_octet(cls, octet_str):
- """Convert a decimal octet into an integer.
-
- Args:
- octet_str: A string, the number to parse.
-
- Returns:
- The octet as an integer.
-
- Raises:
- ValueError: if the octet isn't strictly a decimal from [0..255].
-
- """
- if not octet_str:
- raise ValueError("Empty octet not permitted")
- # Whitelist the characters, since int() allows a lot of bizarre stuff.
- if not cls._DECIMAL_DIGITS.issuperset(octet_str):
- msg = "Only decimal digits permitted in %r"
- raise ValueError(msg % octet_str)
- # We do the length check second, since the invalid character error
- # is likely to be more informative for the user
- if len(octet_str) > 3:
- msg = "At most 3 characters permitted in %r"
- raise ValueError(msg % octet_str)
- # Convert to integer (we know digits are legal)
- octet_int = int(octet_str, 10)
- # Any octets that look like they *might* be written in octal,
- # and which don't look exactly the same in both octal and
- # decimal are rejected as ambiguous
- if octet_int > 7 and octet_str[0] == '0':
- msg = "Ambiguous (octal/decimal) value in %r not permitted"
- raise ValueError(msg % octet_str)
- if octet_int > 255:
- raise ValueError("Octet %d (> 255) not permitted" % octet_int)
- return octet_int
-
- @classmethod
- def _string_from_ip_int(cls, ip_int):
- """Turns a 32-bit integer into dotted decimal notation.
-
- Args:
- ip_int: An integer, the IP address.
-
- Returns:
- The IP address as a string in dotted decimal notation.
-
- """
- return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
- if isinstance(b, bytes)
- else b)
- for b in _compat_to_bytes(ip_int, 4, 'big'))
-
- def _is_hostmask(self, ip_str):
- """Test if the IP string is a hostmask (rather than a netmask).
-
- Args:
- ip_str: A string, the potential hostmask.
-
- Returns:
- A boolean, True if the IP string is a hostmask.
-
- """
- bits = ip_str.split('.')
- try:
- parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
- except ValueError:
- return False
- if len(parts) != len(bits):
- return False
- if parts[0] < parts[-1]:
- return True
- return False
-
- def _reverse_pointer(self):
- """Return the reverse DNS pointer name for the IPv4 address.
-
- This implements the method described in RFC1035 3.5.
-
- """
- reverse_octets = _compat_str(self).split('.')[::-1]
- return '.'.join(reverse_octets) + '.in-addr.arpa'
-
- @property
- def max_prefixlen(self):
- return self._max_prefixlen
-
- @property
- def version(self):
- return self._version
-
-
-class IPv4Address(_BaseV4, _BaseAddress):
-
- """Represent and manipulate single IPv4 Addresses."""
-
- __slots__ = ('_ip', '__weakref__')
-
- def __init__(self, address):
-
- """
- Args:
- address: A string or integer representing the IP
-
- Additionally, an integer can be passed, so
- IPv4Address('192.0.2.1') == IPv4Address(3221225985).
- or, more generally
- IPv4Address(int(IPv4Address('192.0.2.1'))) ==
- IPv4Address('192.0.2.1')
-
- Raises:
- AddressValueError: If ipaddress isn't a valid IPv4 address.
-
- """
- # Efficient constructor from integer.
- if isinstance(address, _compat_int_types):
- self._check_int_address(address)
- self._ip = address
- return
-
- # Constructing from a packed address
- if isinstance(address, bytes):
- self._check_packed_address(address, 4)
- bvs = _compat_bytes_to_byte_vals(address)
- self._ip = _compat_int_from_byte_vals(bvs, 'big')
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP string.
- addr_str = _compat_str(address)
- if '/' in addr_str:
- raise AddressValueError("Unexpected '/' in %r" % address)
- self._ip = self._ip_int_from_string(addr_str)
-
- @property
- def packed(self):
- """The binary representation of this address."""
- return v4_int_to_packed(self._ip)
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within the
- reserved IPv4 Network range.
-
- """
- return self in self._constants._reserved_network
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv4-special-registry.
-
- """
- return any(self in net for net in self._constants._private_networks)
-
- @property
- def is_global(self):
- return (
- self not in self._constants._public_network and
- not self.is_private)
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is multicast.
- See RFC 3171 for details.
-
- """
- return self in self._constants._multicast_network
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 5735 3.
-
- """
- return self == self._constants._unspecified_address
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback per RFC 3330.
-
- """
- return self in self._constants._loopback_network
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is link-local per RFC 3927.
-
- """
- return self in self._constants._linklocal_network
-
-
-class IPv4Interface(IPv4Address):
-
- def __init__(self, address):
- if isinstance(address, (bytes, _compat_int_types)):
- IPv4Address.__init__(self, address)
- self.network = IPv4Network(self._ip)
- self._prefixlen = self._max_prefixlen
- return
-
- if isinstance(address, tuple):
- IPv4Address.__init__(self, address[0])
- if len(address) > 1:
- self._prefixlen = int(address[1])
- else:
- self._prefixlen = self._max_prefixlen
-
- self.network = IPv4Network(address, strict=False)
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
- return
-
- addr = _split_optional_netmask(address)
- IPv4Address.__init__(self, addr[0])
-
- self.network = IPv4Network(address, strict=False)
- self._prefixlen = self.network._prefixlen
-
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
-
- def __str__(self):
- return '%s/%d' % (self._string_from_ip_int(self._ip),
- self.network.prefixlen)
-
- def __eq__(self, other):
- address_equal = IPv4Address.__eq__(self, other)
- if not address_equal or address_equal is NotImplemented:
- return address_equal
- try:
- return self.network == other.network
- except AttributeError:
- # An interface with an associated network is NOT the
- # same as an unassociated address. That's why the hash
- # takes the extra info into account.
- return False
-
- def __lt__(self, other):
- address_less = IPv4Address.__lt__(self, other)
- if address_less is NotImplemented:
- return NotImplemented
- try:
- return (self.network < other.network or
- self.network == other.network and address_less)
- except AttributeError:
- # We *do* allow addresses and interfaces to be sorted. The
- # unassociated address is considered less than all interfaces.
- return False
-
- def __hash__(self):
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
-
- __reduce__ = _IPAddressBase.__reduce__
-
- @property
- def ip(self):
- return IPv4Address(self._ip)
-
- @property
- def with_prefixlen(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.hostmask)
-
-
-class IPv4Network(_BaseV4, _BaseNetwork):
-
- """This class represents and manipulates 32-bit IPv4 network + addresses..
-
- Attributes: [examples for IPv4Network('192.0.2.0/27')]
- .network_address: IPv4Address('192.0.2.0')
- .hostmask: IPv4Address('0.0.0.31')
- .broadcast_address: IPv4Address('192.0.2.32')
- .netmask: IPv4Address('255.255.255.224')
- .prefixlen: 27
-
- """
- # Class to use when creating address objects
- _address_class = IPv4Address
-
- def __init__(self, address, strict=True):
-
- """Instantiate a new IPv4 network object.
-
- Args:
- address: A string or integer representing the IP [& network].
- '192.0.2.0/24'
- '192.0.2.0/255.255.255.0'
- '192.0.0.2/0.0.0.255'
- are all functionally the same in IPv4. Similarly,
- '192.0.2.1'
- '192.0.2.1/255.255.255.255'
- '192.0.2.1/32'
- are also functionally equivalent. That is to say, failing to
- provide a subnetmask will create an object with a mask of /32.
-
- If the mask (portion after the / in the argument) is given in
- dotted quad form, it is treated as a netmask if it starts with a
- non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
- starts with a zero field (e.g. 0.255.255.255 == /8), with the
- single exception of an all-zero mask which is treated as a
- netmask == /0. If no mask is given, a default of /32 is used.
-
- Additionally, an integer can be passed, so
- IPv4Network('192.0.2.1') == IPv4Network(3221225985)
- or, more generally
- IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
- IPv4Interface('192.0.2.1')
-
- Raises:
- AddressValueError: If ipaddress isn't a valid IPv4 address.
- NetmaskValueError: If the netmask isn't valid for
- an IPv4 address.
- ValueError: If strict is True and a network address is not
- supplied.
-
- """
- _BaseNetwork.__init__(self, address)
-
- # Constructing from a packed address or integer
- if isinstance(address, (_compat_int_types, bytes)):
- self.network_address = IPv4Address(address)
- self.netmask, self._prefixlen = self._make_netmask(
- self._max_prefixlen)
- # fixme: address/network test here.
- return
-
- if isinstance(address, tuple):
- if len(address) > 1:
- arg = address[1]
- else:
- # We weren't given an address[1]
- arg = self._max_prefixlen
- self.network_address = IPv4Address(address[0])
- self.netmask, self._prefixlen = self._make_netmask(arg)
- packed = int(self.network_address)
- if packed & int(self.netmask) != packed:
- if strict:
- raise ValueError('%s has host bits set' % self)
- else:
- self.network_address = IPv4Address(packed &
- int(self.netmask))
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr = _split_optional_netmask(address)
- self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
-
- if len(addr) == 2:
- arg = addr[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
-
- if strict:
- if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
- self.network_address):
- raise ValueError('%s has host bits set' % self)
- self.network_address = IPv4Address(int(self.network_address) &
- int(self.netmask))
-
- if self._prefixlen == (self._max_prefixlen - 1):
- self.hosts = self.__iter__
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, True if the address is not reserved per
- iana-ipv4-special-registry.
-
- """
- return (not (self.network_address in IPv4Network('100.64.0.0/10') and
- self.broadcast_address in IPv4Network('100.64.0.0/10')) and
- not self.is_private)
-
-
-class _IPv4Constants(object):
-
- _linklocal_network = IPv4Network('169.254.0.0/16')
-
- _loopback_network = IPv4Network('127.0.0.0/8')
-
- _multicast_network = IPv4Network('224.0.0.0/4')
-
- _public_network = IPv4Network('100.64.0.0/10')
-
- _private_networks = [
- IPv4Network('0.0.0.0/8'),
- IPv4Network('10.0.0.0/8'),
- IPv4Network('127.0.0.0/8'),
- IPv4Network('169.254.0.0/16'),
- IPv4Network('172.16.0.0/12'),
- IPv4Network('192.0.0.0/29'),
- IPv4Network('192.0.0.170/31'),
- IPv4Network('192.0.2.0/24'),
- IPv4Network('192.168.0.0/16'),
- IPv4Network('198.18.0.0/15'),
- IPv4Network('198.51.100.0/24'),
- IPv4Network('203.0.113.0/24'),
- IPv4Network('240.0.0.0/4'),
- IPv4Network('255.255.255.255/32'),
- ]
-
- _reserved_network = IPv4Network('240.0.0.0/4')
-
- _unspecified_address = IPv4Address('0.0.0.0')
-
-
-IPv4Address._constants = _IPv4Constants
-
-
-class _BaseV6(object):
-
- """Base IPv6 object.
-
- The following methods are used by IPv6 objects in both single IP
- addresses and networks.
-
- """
-
- __slots__ = ()
- _version = 6
- _ALL_ONES = (2 ** IPV6LENGTH) - 1
- _HEXTET_COUNT = 8
- _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
- _max_prefixlen = IPV6LENGTH
-
- # There are only a bunch of valid v6 netmasks, so we cache them all
- # when constructed (see _make_netmask()).
- _netmask_cache = {}
-
- @classmethod
- def _make_netmask(cls, arg):
- """Make a (netmask, prefix_len) tuple from the given argument.
-
- Argument can be:
- - an integer (the prefix length)
- - a string representing the prefix length (e.g. "24")
- - a string representing the prefix netmask (e.g. "255.255.255.0")
- """
- if arg not in cls._netmask_cache:
- if isinstance(arg, _compat_int_types):
- prefixlen = arg
- else:
- prefixlen = cls._prefix_from_prefix_string(arg)
- netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
- cls._netmask_cache[arg] = netmask, prefixlen
- return cls._netmask_cache[arg]
-
- @classmethod
- def _ip_int_from_string(cls, ip_str):
- """Turn an IPv6 ip_str into an integer.
-
- Args:
- ip_str: A string, the IPv6 ip_str.
-
- Returns:
- An int, the IPv6 address
-
- Raises:
- AddressValueError: if ip_str isn't a valid IPv6 Address.
-
- """
- if not ip_str:
- raise AddressValueError('Address cannot be empty')
-
- parts = ip_str.split(':')
-
- # An IPv6 address needs at least 2 colons (3 parts).
- _min_parts = 3
- if len(parts) < _min_parts:
- msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
- raise AddressValueError(msg)
-
- # If the address has an IPv4-style suffix, convert it to hexadecimal.
- if '.' in parts[-1]:
- try:
- ipv4_int = IPv4Address(parts.pop())._ip
- except AddressValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
- parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
- parts.append('%x' % (ipv4_int & 0xFFFF))
-
- # An IPv6 address can't have more than 8 colons (9 parts).
- # The extra colon comes from using the "::" notation for a single
- # leading or trailing zero part.
- _max_parts = cls._HEXTET_COUNT + 1
- if len(parts) > _max_parts:
- msg = "At most %d colons permitted in %r" % (
- _max_parts - 1, ip_str)
- raise AddressValueError(msg)
-
- # Disregarding the endpoints, find '::' with nothing in between.
- # This indicates that a run of zeroes has been skipped.
- skip_index = None
- for i in _compat_range(1, len(parts) - 1):
- if not parts[i]:
- if skip_index is not None:
- # Can't have more than one '::'
- msg = "At most one '::' permitted in %r" % ip_str
- raise AddressValueError(msg)
- skip_index = i
-
- # parts_hi is the number of parts to copy from above/before the '::'
- # parts_lo is the number of parts to copy from below/after the '::'
- if skip_index is not None:
- # If we found a '::', then check if it also covers the endpoints.
- parts_hi = skip_index
- parts_lo = len(parts) - skip_index - 1
- if not parts[0]:
- parts_hi -= 1
- if parts_hi:
- msg = "Leading ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # ^: requires ^::
- if not parts[-1]:
- parts_lo -= 1
- if parts_lo:
- msg = "Trailing ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # :$ requires ::$
- parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
- if parts_skipped < 1:
- msg = "Expected at most %d other parts with '::' in %r"
- raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
- else:
- # Otherwise, allocate the entire address to parts_hi. The
- # endpoints could still be empty, but _parse_hextet() will check
- # for that.
- if len(parts) != cls._HEXTET_COUNT:
- msg = "Exactly %d parts expected without '::' in %r"
- raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
- if not parts[0]:
- msg = "Leading ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # ^: requires ^::
- if not parts[-1]:
- msg = "Trailing ':' only permitted as part of '::' in %r"
- raise AddressValueError(msg % ip_str) # :$ requires ::$
- parts_hi = len(parts)
- parts_lo = 0
- parts_skipped = 0
-
- try:
- # Now, parse the hextets into a 128-bit integer.
- ip_int = 0
- for i in range(parts_hi):
- ip_int <<= 16
- ip_int |= cls._parse_hextet(parts[i])
- ip_int <<= 16 * parts_skipped
- for i in range(-parts_lo, 0):
- ip_int <<= 16
- ip_int |= cls._parse_hextet(parts[i])
- return ip_int
- except ValueError as exc:
- raise AddressValueError("%s in %r" % (exc, ip_str))
-
- @classmethod
- def _parse_hextet(cls, hextet_str):
- """Convert an IPv6 hextet string into an integer.
-
- Args:
- hextet_str: A string, the number to parse.
-
- Returns:
- The hextet as an integer.
-
- Raises:
- ValueError: if the input isn't strictly a hex number from
- [0..FFFF].
-
- """
- # Whitelist the characters, since int() allows a lot of bizarre stuff.
- if not cls._HEX_DIGITS.issuperset(hextet_str):
- raise ValueError("Only hex digits permitted in %r" % hextet_str)
- # We do the length check second, since the invalid character error
- # is likely to be more informative for the user
- if len(hextet_str) > 4:
- msg = "At most 4 characters permitted in %r"
- raise ValueError(msg % hextet_str)
- # Length check means we can skip checking the integer value
- return int(hextet_str, 16)
-
- @classmethod
- def _compress_hextets(cls, hextets):
- """Compresses a list of hextets.
-
- Compresses a list of strings, replacing the longest continuous
- sequence of "0" in the list with "" and adding empty strings at
- the beginning or at the end of the string such that subsequently
- calling ":".join(hextets) will produce the compressed version of
- the IPv6 address.
-
- Args:
- hextets: A list of strings, the hextets to compress.
-
- Returns:
- A list of strings.
-
- """
- best_doublecolon_start = -1
- best_doublecolon_len = 0
- doublecolon_start = -1
- doublecolon_len = 0
- for index, hextet in enumerate(hextets):
- if hextet == '0':
- doublecolon_len += 1
- if doublecolon_start == -1:
- # Start of a sequence of zeros.
- doublecolon_start = index
- if doublecolon_len > best_doublecolon_len:
- # This is the longest sequence of zeros so far.
- best_doublecolon_len = doublecolon_len
- best_doublecolon_start = doublecolon_start
- else:
- doublecolon_len = 0
- doublecolon_start = -1
-
- if best_doublecolon_len > 1:
- best_doublecolon_end = (best_doublecolon_start +
- best_doublecolon_len)
- # For zeros at the end of the address.
- if best_doublecolon_end == len(hextets):
- hextets += ['']
- hextets[best_doublecolon_start:best_doublecolon_end] = ['']
- # For zeros at the beginning of the address.
- if best_doublecolon_start == 0:
- hextets = [''] + hextets
-
- return hextets
-
- @classmethod
- def _string_from_ip_int(cls, ip_int=None):
- """Turns a 128-bit integer into hexadecimal notation.
-
- Args:
- ip_int: An integer, the IP address.
-
- Returns:
- A string, the hexadecimal representation of the address.
-
- Raises:
- ValueError: The address is bigger than 128 bits of all ones.
-
- """
- if ip_int is None:
- ip_int = int(cls._ip)
-
- if ip_int > cls._ALL_ONES:
- raise ValueError('IPv6 address is too large')
-
- hex_str = '%032x' % ip_int
- hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
-
- hextets = cls._compress_hextets(hextets)
- return ':'.join(hextets)
-
- def _explode_shorthand_ip_string(self):
- """Expand a shortened IPv6 address.
-
- Args:
- ip_str: A string, the IPv6 address.
-
- Returns:
- A string, the expanded IPv6 address.
-
- """
- if isinstance(self, IPv6Network):
- ip_str = _compat_str(self.network_address)
- elif isinstance(self, IPv6Interface):
- ip_str = _compat_str(self.ip)
- else:
- ip_str = _compat_str(self)
-
- ip_int = self._ip_int_from_string(ip_str)
- hex_str = '%032x' % ip_int
- parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
- if isinstance(self, (_BaseNetwork, IPv6Interface)):
- return '%s/%d' % (':'.join(parts), self._prefixlen)
- return ':'.join(parts)
-
- def _reverse_pointer(self):
- """Return the reverse DNS pointer name for the IPv6 address.
-
- This implements the method described in RFC3596 2.5.
-
- """
- reverse_chars = self.exploded[::-1].replace(':', '')
- return '.'.join(reverse_chars) + '.ip6.arpa'
-
- @property
- def max_prefixlen(self):
- return self._max_prefixlen
-
- @property
- def version(self):
- return self._version
-
-
-class IPv6Address(_BaseV6, _BaseAddress):
-
- """Represent and manipulate single IPv6 Addresses."""
-
- __slots__ = ('_ip', '__weakref__')
-
- def __init__(self, address):
- """Instantiate a new IPv6 address object.
-
- Args:
- address: A string or integer representing the IP
-
- Additionally, an integer can be passed, so
- IPv6Address('2001:db8::') ==
- IPv6Address(42540766411282592856903984951653826560)
- or, more generally
- IPv6Address(int(IPv6Address('2001:db8::'))) ==
- IPv6Address('2001:db8::')
-
- Raises:
- AddressValueError: If address isn't a valid IPv6 address.
-
- """
- # Efficient constructor from integer.
- if isinstance(address, _compat_int_types):
- self._check_int_address(address)
- self._ip = address
- return
-
- # Constructing from a packed address
- if isinstance(address, bytes):
- self._check_packed_address(address, 16)
- bvs = _compat_bytes_to_byte_vals(address)
- self._ip = _compat_int_from_byte_vals(bvs, 'big')
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP string.
- addr_str = _compat_str(address)
- if '/' in addr_str:
- raise AddressValueError("Unexpected '/' in %r" % address)
- self._ip = self._ip_int_from_string(addr_str)
-
- @property
- def packed(self):
- """The binary representation of this address."""
- return v6_int_to_packed(self._ip)
-
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is a multicast address.
- See RFC 2373 2.7 for details.
-
- """
- return self in self._constants._multicast_network
-
- @property
- def is_reserved(self):
- """Test if the address is otherwise IETF reserved.
-
- Returns:
- A boolean, True if the address is within one of the
- reserved IPv6 Network ranges.
-
- """
- return any(self in x for x in self._constants._reserved_networks)
-
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
-
- Returns:
- A boolean, True if the address is reserved per RFC 4291.
-
- """
- return self in self._constants._linklocal_network
-
- @property
- def is_site_local(self):
- """Test if the address is reserved for site-local.
-
- Note that the site-local address space has been deprecated by RFC 3879.
- Use is_private to test if this address is in the space of unique local
- addresses as defined by RFC 4193.
-
- Returns:
- A boolean, True if the address is reserved per RFC 3513 2.5.6.
-
- """
- return self in self._constants._sitelocal_network
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
-
- Returns:
- A boolean, True if the address is reserved per
- iana-ipv6-special-registry.
-
- """
- return any(self in net for net in self._constants._private_networks)
-
- @property
- def is_global(self):
- """Test if this address is allocated for public networks.
-
- Returns:
- A boolean, true if the address is not reserved per
- iana-ipv6-special-registry.
-
- """
- return not self.is_private
-
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
-
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 2373 2.5.2.
-
- """
- return self._ip == 0
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback address.
-
- Returns:
- A boolean, True if the address is a loopback address as defined in
- RFC 2373 2.5.3.
-
- """
- return self._ip == 1
-
- @property
- def ipv4_mapped(self):
- """Return the IPv4 mapped address.
-
- Returns:
- If the IPv6 address is a v4 mapped address, return the
- IPv4 mapped address. Return None otherwise.
-
- """
- if (self._ip >> 32) != 0xFFFF:
- return None
- return IPv4Address(self._ip & 0xFFFFFFFF)
-
- @property
- def teredo(self):
- """Tuple of embedded teredo IPs.
-
- Returns:
- Tuple of the (server, client) IPs or None if the address
- doesn't appear to be a teredo address (doesn't start with
- 2001::/32)
-
- """
- if (self._ip >> 96) != 0x20010000:
- return None
- return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
- IPv4Address(~self._ip & 0xFFFFFFFF))
-
- @property
- def sixtofour(self):
- """Return the IPv4 6to4 embedded address.
-
- Returns:
- The IPv4 6to4-embedded address if present or None if the
- address doesn't appear to contain a 6to4 embedded address.
-
- """
- if (self._ip >> 112) != 0x2002:
- return None
- return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
-
-
-class IPv6Interface(IPv6Address):
-
- def __init__(self, address):
- if isinstance(address, (bytes, _compat_int_types)):
- IPv6Address.__init__(self, address)
- self.network = IPv6Network(self._ip)
- self._prefixlen = self._max_prefixlen
- return
- if isinstance(address, tuple):
- IPv6Address.__init__(self, address[0])
- if len(address) > 1:
- self._prefixlen = int(address[1])
- else:
- self._prefixlen = self._max_prefixlen
- self.network = IPv6Network(address, strict=False)
- self.netmask = self.network.netmask
- self.hostmask = self.network.hostmask
- return
-
- addr = _split_optional_netmask(address)
- IPv6Address.__init__(self, addr[0])
- self.network = IPv6Network(address, strict=False)
- self.netmask = self.network.netmask
- self._prefixlen = self.network._prefixlen
- self.hostmask = self.network.hostmask
-
- def __str__(self):
- return '%s/%d' % (self._string_from_ip_int(self._ip),
- self.network.prefixlen)
-
- def __eq__(self, other):
- address_equal = IPv6Address.__eq__(self, other)
- if not address_equal or address_equal is NotImplemented:
- return address_equal
- try:
- return self.network == other.network
- except AttributeError:
- # An interface with an associated network is NOT the
- # same as an unassociated address. That's why the hash
- # takes the extra info into account.
- return False
-
- def __lt__(self, other):
- address_less = IPv6Address.__lt__(self, other)
- if address_less is NotImplemented:
- return NotImplemented
- try:
- return (self.network < other.network or
- self.network == other.network and address_less)
- except AttributeError:
- # We *do* allow addresses and interfaces to be sorted. The
- # unassociated address is considered less than all interfaces.
- return False
-
- def __hash__(self):
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
-
- __reduce__ = _IPAddressBase.__reduce__
-
- @property
- def ip(self):
- return IPv6Address(self._ip)
-
- @property
- def with_prefixlen(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self._prefixlen)
-
- @property
- def with_netmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.netmask)
-
- @property
- def with_hostmask(self):
- return '%s/%s' % (self._string_from_ip_int(self._ip),
- self.hostmask)
-
- @property
- def is_unspecified(self):
- return self._ip == 0 and self.network.is_unspecified
-
- @property
- def is_loopback(self):
- return self._ip == 1 and self.network.is_loopback
-
-
-class IPv6Network(_BaseV6, _BaseNetwork):
-
- """This class represents and manipulates 128-bit IPv6 networks.
-
- Attributes: [examples for IPv6('2001:db8::1000/124')]
- .network_address: IPv6Address('2001:db8::1000')
- .hostmask: IPv6Address('::f')
- .broadcast_address: IPv6Address('2001:db8::100f')
- .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
- .prefixlen: 124
-
- """
-
- # Class to use when creating address objects
- _address_class = IPv6Address
-
- def __init__(self, address, strict=True):
- """Instantiate a new IPv6 Network object.
-
- Args:
- address: A string or integer representing the IPv6 network or the
- IP and prefix/netmask.
- '2001:db8::/128'
- '2001:db8:0000:0000:0000:0000:0000:0000/128'
- '2001:db8::'
- are all functionally the same in IPv6. That is to say,
- failing to provide a subnetmask will create an object with
- a mask of /128.
-
- Additionally, an integer can be passed, so
- IPv6Network('2001:db8::') ==
- IPv6Network(42540766411282592856903984951653826560)
- or, more generally
- IPv6Network(int(IPv6Network('2001:db8::'))) ==
- IPv6Network('2001:db8::')
-
- strict: A boolean. If true, ensure that we have been passed
- A true network address, eg, 2001:db8::1000/124 and not an
- IP address on a network, eg, 2001:db8::1/124.
-
- Raises:
- AddressValueError: If address isn't a valid IPv6 address.
- NetmaskValueError: If the netmask isn't valid for
- an IPv6 address.
- ValueError: If strict was True and a network address was not
- supplied.
-
- """
- _BaseNetwork.__init__(self, address)
-
- # Efficient constructor from integer or packed address
- if isinstance(address, (bytes, _compat_int_types)):
- self.network_address = IPv6Address(address)
- self.netmask, self._prefixlen = self._make_netmask(
- self._max_prefixlen)
- return
-
- if isinstance(address, tuple):
- if len(address) > 1:
- arg = address[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
- self.network_address = IPv6Address(address[0])
- packed = int(self.network_address)
- if packed & int(self.netmask) != packed:
- if strict:
- raise ValueError('%s has host bits set' % self)
- else:
- self.network_address = IPv6Address(packed &
- int(self.netmask))
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr = _split_optional_netmask(address)
-
- self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
-
- if len(addr) == 2:
- arg = addr[1]
- else:
- arg = self._max_prefixlen
- self.netmask, self._prefixlen = self._make_netmask(arg)
-
- if strict:
- if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
- self.network_address):
- raise ValueError('%s has host bits set' % self)
- self.network_address = IPv6Address(int(self.network_address) &
- int(self.netmask))
-
- if self._prefixlen == (self._max_prefixlen - 1):
- self.hosts = self.__iter__
-
- def hosts(self):
- """Generate Iterator over usable hosts in a network.
-
- This is like __iter__ except it doesn't return the
- Subnet-Router anycast address.
-
- """
- network = int(self.network_address)
- broadcast = int(self.broadcast_address)
- for x in _compat_range(network + 1, broadcast + 1):
- yield self._address_class(x)
-
- @property
- def is_site_local(self):
- """Test if the address is reserved for site-local.
-
- Note that the site-local address space has been deprecated by RFC 3879.
- Use is_private to test if this address is in the space of unique local
- addresses as defined by RFC 4193.
-
- Returns:
- A boolean, True if the address is reserved per RFC 3513 2.5.6.
-
- """
- return (self.network_address.is_site_local and
- self.broadcast_address.is_site_local)
-
-
-class _IPv6Constants(object):
-
- _linklocal_network = IPv6Network('fe80::/10')
-
- _multicast_network = IPv6Network('ff00::/8')
-
- _private_networks = [
- IPv6Network('::1/128'),
- IPv6Network('::/128'),
- IPv6Network('::ffff:0:0/96'),
- IPv6Network('100::/64'),
- IPv6Network('2001::/23'),
- IPv6Network('2001:2::/48'),
- IPv6Network('2001:db8::/32'),
- IPv6Network('2001:10::/28'),
- IPv6Network('fc00::/7'),
- IPv6Network('fe80::/10'),
- ]
-
- _reserved_networks = [
- IPv6Network('::/8'), IPv6Network('100::/8'),
- IPv6Network('200::/7'), IPv6Network('400::/6'),
- IPv6Network('800::/5'), IPv6Network('1000::/4'),
- IPv6Network('4000::/3'), IPv6Network('6000::/3'),
- IPv6Network('8000::/3'), IPv6Network('A000::/3'),
- IPv6Network('C000::/3'), IPv6Network('E000::/4'),
- IPv6Network('F000::/5'), IPv6Network('F800::/6'),
- IPv6Network('FE00::/9'),
- ]
-
- _sitelocal_network = IPv6Network('fec0::/10')
-
-
-IPv6Address._constants = _IPv6Constants
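
The removal above drops what appears to be a vendored Python 2/3 backport of the standard library's ipaddress module (note the _compat_* shims); on Python 3.6 the same behaviour comes straight from the stdlib. A minimal sketch of the stdlib calls that cover the helpers seen above (illustrative only, not part of the patch):

import ipaddress

# Address classification mirrors the _IPv4Constants/_IPv6Constants tables removed above.
addr = ipaddress.ip_address('10.0.0.1')
print(addr.is_private, addr.is_global)          # True False

# Strict network parsing rejects host bits, as in the removed IPv4Network.__init__.
try:
    ipaddress.ip_network('192.168.1.1/24')
except ValueError as exc:
    print(exc)                                  # "192.168.1.1/24 has host bits set"
print(ipaddress.ip_network('192.168.1.1/24', strict=False))   # 192.168.1.0/24

# Compression/expansion corresponds to _compress_hextets / _explode_shorthand_ip_string.
v6 = ipaddress.ip_address('2001:db8:0:0:0:0:0:1')
print(v6.compressed, v6.exploded)               # 2001:db8::1  2001:0db8:...:0001
print(v6.reverse_pointer)                       # RFC 3596 ip6.arpa name
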
diff --git a/readthedocs/api/base.py b/readthedocs/api/base.py
index 73b64e431cf..a70fb024599 100644
--- a/readthedocs/api/base.py
+++ b/readthedocs/api/base.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
-"""API resources."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""API resources."""
import logging
-from builtins import object
import redis
from django.conf.urls import url
@@ -25,6 +22,7 @@
from .utils import PostAuthentication
+
log = logging.getLogger(__name__)
@@ -34,7 +32,7 @@ class ProjectResource(ModelResource):
users = fields.ToManyField('readthedocs.api.base.UserResource', 'users')
- class Meta(object):
+ class Meta:
include_absolute_url = True
allowed_methods = ['get', 'post', 'put']
queryset = Project.objects.api()
@@ -48,7 +46,7 @@ class Meta(object):
def get_object_list(self, request):
self._meta.queryset = Project.objects.api(user=request.user)
- return super(ProjectResource, self).get_object_list(request)
+ return super().get_object_list(request)
def dehydrate(self, bundle):
bundle.data['downloads'] = bundle.obj.get_downloads()
@@ -72,7 +70,9 @@ def post_list(self, request, **kwargs):
# Force this in an ugly way, at least should do "reverse"
deserialized['users'] = ['/api/v1/user/%s/' % request.user.id]
bundle = self.build_bundle(
- data=dict_strip_unicode_keys(deserialized), request=request)
+ data=dict_strip_unicode_keys(deserialized),
+ request=request,
+ )
self.is_valid(bundle)
updated_bundle = self.obj_create(bundle, request=request)
return HttpCreated(location=self.get_resource_uri(updated_bundle))
@@ -81,14 +81,20 @@ def prepend_urls(self):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
- self.wrap_view('get_schema'), name='api_get_schema'),
+ self.wrap_view('get_schema'),
+ name='api_get_schema',
+ ),
url(
r'^(?P<resource_name>%s)/search%s$' %
(self._meta.resource_name, trailing_slash()),
- self.wrap_view('get_search'), name='api_get_search'),
- url((r'^(?P<resource_name>%s)/(?P<slug>[a-z-_]+)/$') %
- self._meta.resource_name, self.wrap_view('dispatch_detail'),
- name='api_dispatch_detail'),
+ self.wrap_view('get_search'),
+ name='api_get_search',
+ ),
+ url(
+ (r'^(?P<resource_name>%s)/(?P<slug>[a-z-_]+)/$') % self._meta.resource_name,
+ self.wrap_view('dispatch_detail'),
+ name='api_dispatch_detail',
+ ),
]
@@ -98,7 +104,7 @@ class VersionResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project', full=True)
- class Meta(object):
+ class Meta:
allowed_methods = ['get', 'put', 'post']
always_return_data = True
queryset = Version.objects.api()
@@ -112,7 +118,7 @@ class Meta(object):
def get_object_list(self, request):
self._meta.queryset = Version.objects.api(user=request.user)
- return super(VersionResource, self).get_object_list(request)
+ return super().get_object_list(request)
def build_version(self, request, **kwargs):
project = get_object_or_404(Project, slug=kwargs['project_slug'])
@@ -125,17 +131,23 @@ def prepend_urls(self):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
- self.wrap_view('get_schema'), name='api_get_schema'),
+ self.wrap_view('get_schema'),
+ name='api_get_schema',
+ ),
url(
r'^(?P<resource_name>%s)/(?P<project__slug>[a-z-_]+[a-z0-9-_]+)/$' # noqa
% self._meta.resource_name,
self.wrap_view('dispatch_list'),
- name='api_version_list'),
- url((
- r'^(?P<resource_name>%s)/(?P<project_slug>[a-z-_]+[a-z0-9-_]+)/(?P<version_slug>'
- r'[a-z0-9-_.]+)/build/$') %
- self._meta.resource_name, self.wrap_view('build_version'),
- name='api_version_build_slug'),
+ name='api_version_list',
+ ),
+ url(
+ (
+ r'^(?P<resource_name>%s)/(?P<project_slug>[a-z-_]+[a-z0-9-_]+)/(?P<version_slug>'
+ r'[a-z0-9-_.]+)/build/$'
+ ) % self._meta.resource_name,
+ self.wrap_view('build_version'),
+ name='api_version_build_slug',
+ ),
]
@@ -145,7 +157,7 @@ class FileResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project', full=True)
- class Meta(object):
+ class Meta:
allowed_methods = ['get', 'post']
queryset = ImportedFile.objects.all()
excludes = ['md5', 'slug']
@@ -157,11 +169,15 @@ def prepend_urls(self):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
- self.wrap_view('get_schema'), name='api_get_schema'),
+ self.wrap_view('get_schema'),
+ name='api_get_schema',
+ ),
url(
r'^(?P<resource_name>%s)/anchor%s$' %
(self._meta.resource_name, trailing_slash()),
- self.wrap_view('get_anchor'), name='api_get_anchor'),
+ self.wrap_view('get_anchor'),
+ name='api_get_anchor',
+ ),
]
def get_anchor(self, request, **__):
@@ -190,7 +206,7 @@ class UserResource(ModelResource):
"""Read-only API resource for User model."""
- class Meta(object):
+ class Meta:
allowed_methods = ['get']
queryset = User.objects.all()
fields = ['username', 'id']
@@ -202,9 +218,12 @@ def prepend_urls(self):
return [
url(
r'^(?P<resource_name>%s)/schema/$' % self._meta.resource_name,
- self.wrap_view('get_schema'), name='api_get_schema'),
+ self.wrap_view('get_schema'),
+ name='api_get_schema',
+ ),
url(
- r'^(?P<resource_name>%s)/(?P<username>[a-z-_]+)/$' %
- self._meta.resource_name, self.wrap_view('dispatch_detail'),
- name='api_dispatch_detail'),
+ r'^(?P<resource_name>%s)/(?P<username>[a-z-_]+)/$' % self._meta.resource_name,
+ self.wrap_view('dispatch_detail'),
+ name='api_dispatch_detail',
+ ),
]
diff --git a/readthedocs/api/client.py b/readthedocs/api/client.py
index dd742198b15..a3020163449 100644
--- a/readthedocs/api/client.py
+++ b/readthedocs/api/client.py
@@ -1,16 +1,14 @@
# -*- coding: utf-8 -*-
"""Slumber API client."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import logging
-from django.conf import settings
import requests
+from django.conf import settings
from requests_toolbelt.adapters import host_header_ssl
from slumber import API
+
log = logging.getLogger(__name__)
PRODUCTION_DOMAIN = getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org')
diff --git a/readthedocs/api/utils.py b/readthedocs/api/utils.py
index 1daa2deb963..c5ecd60cd1e 100644
--- a/readthedocs/api/utils.py
+++ b/readthedocs/api/utils.py
@@ -1,13 +1,13 @@
-"""Utility classes for api module"""
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+
+"""Utility classes for api module."""
import logging
from django.utils.translation import ugettext
-
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import Authorization
-from tastypie.resources import ModelResource
from tastypie.exceptions import NotFound
+from tastypie.resources import ModelResource
log = logging.getLogger(__name__)
@@ -18,14 +18,14 @@ class PostAuthentication(BasicAuthentication):
"""Require HTTP Basic authentication for any method other than GET."""
def is_authenticated(self, request, **kwargs):
- val = super(PostAuthentication, self).is_authenticated(request,
- **kwargs)
- if request.method == "GET":
+ val = super().is_authenticated(request, **kwargs)
+ if request.method == 'GET':
return True
return val
class EnhancedModelResource(ModelResource):
+
def obj_get_list(self, request=None, *_, **kwargs): # noqa
"""
A ORM-specific implementation of ``obj_get_list``.
@@ -44,12 +44,16 @@ def obj_get_list(self, request=None, *_, **kwargs): # noqa
try:
return self.get_object_list(request).filter(**applicable_filters)
except ValueError as e:
- raise NotFound(ugettext("Invalid resource lookup data provided "
- "(mismatched type).: %(error)s")
- % {'error': e})
+ raise NotFound(
+ ugettext(
+ 'Invalid resource lookup data provided '
+ '(mismatched type).: %(error)s',
+ ) % {'error': e},
+ )
class OwnerAuthorization(Authorization):
+
def apply_limits(self, request, object_list):
if request and hasattr(request, 'user') and request.method != 'GET':
if request.user.is_authenticated:
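
A recurring change in this patch is replacing super(SomeClass, self).method(...) with the Python 3 zero-argument form, as in is_authenticated() above. A tiny sketch of the equivalence (class names made up):

class Base:
    def greet(self):
        return 'base'

class Child(Base):
    def greet(self):
        # Python 3 infers the class and instance from the enclosing scope,
        # so this is equivalent to super(Child, self).greet().
        return 'child/' + super().greet()

assert Child().greet() == 'child/base'
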
diff --git a/readthedocs/builds/admin.py b/readthedocs/builds/admin.py
index 66c046f9c3e..571536f03a2 100644
--- a/readthedocs/builds/admin.py
+++ b/readthedocs/builds/admin.py
@@ -1,10 +1,12 @@
+# -*- coding: utf-8 -*-
+
"""Django admin interface for `~builds.models.Build` and related models."""
-from __future__ import absolute_import
from django.contrib import admin
-from readthedocs.builds.models import Build, Version, BuildCommandResult
from guardian.admin import GuardedModelAdmin
+from readthedocs.builds.models import Build, BuildCommandResult, Version
+
class BuildCommandResultInline(admin.TabularInline):
model = BuildCommandResult
@@ -12,8 +14,25 @@ class BuildCommandResultInline(admin.TabularInline):
class BuildAdmin(admin.ModelAdmin):
- fields = ('project', 'version', 'type', 'state', 'error', 'success', 'length', 'cold_storage')
- list_display = ('id', 'project', 'version_name', 'success', 'type', 'state', 'date')
+ fields = (
+ 'project',
+ 'version',
+ 'type',
+ 'state',
+ 'error',
+ 'success',
+ 'length',
+ 'cold_storage',
+ )
+ list_display = (
+ 'id',
+ 'project',
+ 'version_name',
+ 'success',
+ 'type',
+ 'state',
+ 'date',
+ )
list_filter = ('type', 'state', 'success')
list_select_related = ('project', 'version')
raw_id_fields = ('project', 'version')
@@ -26,7 +45,14 @@ def version_name(self, obj):
class VersionAdmin(GuardedModelAdmin):
search_fields = ('slug', 'project__name')
- list_display = ('slug', 'type', 'project', 'privacy_level', 'active', 'built')
+ list_display = (
+ 'slug',
+ 'type',
+ 'project',
+ 'privacy_level',
+ 'active',
+ 'built',
+ )
list_filter = ('type', 'privacy_level', 'active', 'built')
raw_id_fields = ('project',)
diff --git a/readthedocs/builds/constants.py b/readthedocs/builds/constants.py
index 99d816814ba..94858300c78 100644
--- a/readthedocs/builds/constants.py
+++ b/readthedocs/builds/constants.py
@@ -1,8 +1,10 @@
+# -*- coding: utf-8 -*-
+
"""Constants for the builds app."""
-from __future__ import absolute_import
-from django.utils.translation import ugettext_lazy as _
from django.conf import settings
+from django.utils.translation import ugettext_lazy as _
+
BUILD_STATE_TRIGGERED = 'triggered'
BUILD_STATE_CLONING = 'cloning'
diff --git a/readthedocs/builds/forms.py b/readthedocs/builds/forms.py
index 74fe0dba504..406da7417ab 100644
--- a/readthedocs/builds/forms.py
+++ b/readthedocs/builds/forms.py
@@ -1,13 +1,7 @@
-"""Django forms for the builds app."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Django forms for the builds app."""
-from builtins import object
from django import forms
from django.utils.translation import ugettext_lazy as _
@@ -17,7 +11,7 @@
class VersionForm(forms.ModelForm):
- class Meta(object):
+ class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
@@ -26,10 +20,10 @@ def clean_active(self):
if self._is_default_version() and not active:
msg = _(
'{version} is the default version of the project, '
- 'it should be active.'
+ 'it should be active.',
)
raise forms.ValidationError(
- msg.format(version=self.instance.verbose_name)
+ msg.format(version=self.instance.verbose_name),
)
return active
@@ -38,7 +32,7 @@ def _is_default_version(self):
return project.default_version == self.instance.slug
def save(self, commit=True):
- obj = super(VersionForm, self).save(commit=commit)
+ obj = super().save(commit=commit)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
return obj
diff --git a/readthedocs/builds/managers.py b/readthedocs/builds/managers.py
index 9ef1b2836e5..be9c7050ea5 100644
--- a/readthedocs/builds/managers.py
+++ b/readthedocs/builds/managers.py
@@ -1,14 +1,23 @@
-"""Build and Version class model Managers"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Build and Version class model Managers."""
from django.db import models
-from .constants import (BRANCH, TAG, LATEST, LATEST_VERBOSE_NAME, STABLE,
- STABLE_VERBOSE_NAME)
+from readthedocs.core.utils.extend import (
+ SettingsOverrideObject,
+ get_override_class,
+)
+
+from .constants import (
+ BRANCH,
+ LATEST,
+ LATEST_VERBOSE_NAME,
+ STABLE,
+ STABLE_VERBOSE_NAME,
+ TAG,
+)
from .querysets import VersionQuerySet
-from readthedocs.core.utils.extend import (SettingsOverrideObject,
- get_override_class)
__all__ = ['VersionManager']
@@ -30,9 +39,9 @@ def from_queryset(cls, queryset_class, class_name=None):
# no direct members.
queryset_class = get_override_class(
VersionQuerySet,
- VersionQuerySet._default_class # pylint: disable=protected-access
+ VersionQuerySet._default_class, # pylint: disable=protected-access
)
- return super(VersionManagerBase, cls).from_queryset(queryset_class, class_name)
+ return super().from_queryset(queryset_class, class_name)
def create_stable(self, **kwargs):
defaults = {
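
The managers hunk above builds VersionManager via Manager.from_queryset(), so QuerySet methods are exposed on the manager, optionally substituting an override class resolved from settings. A simplified sketch of the from_queryset() side, assuming a configured Django project and ignoring the RTD-specific override lookup (the privacy_level filter is a stand-in):

from django.db import models

class VersionQuerySet(models.QuerySet):
    def public(self):
        # Stand-in for the real privacy filtering.
        return self.filter(privacy_level='public')

# from_queryset() builds a Manager subclass whose methods proxy to the QuerySet,
# so Version.objects.public() behaves like Version.objects.all().public().
VersionManager = models.Manager.from_queryset(VersionQuerySet)

class Version(models.Model):
    privacy_level = models.CharField(max_length=20, default='public')
    objects = VersionManager()

    class Meta:
        app_label = 'builds'
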
diff --git a/readthedocs/builds/migrations/0001_initial.py b/readthedocs/builds/migrations/0001_initial.py
index 32e6e0eb1fd..27f61efb797 100644
--- a/readthedocs/builds/migrations/0001_initial.py
+++ b/readthedocs/builds/migrations/0001_initial.py
@@ -1,10 +1,8 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+import taggit.managers
+from django.db import migrations, models
-from __future__ import absolute_import
-from django.db import models, migrations
import readthedocs.builds.version_slug
-import taggit.managers
class Migration(migrations.Migration):
@@ -77,10 +75,10 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='version',
- unique_together=set([('project', 'slug')]),
+ unique_together={('project', 'slug')},
),
migrations.AlterIndexTogether(
name='build',
- index_together=set([('version', 'state', 'type')]),
+ index_together={('version', 'state', 'type')},
),
]
diff --git a/readthedocs/builds/migrations/0002_build_command_initial.py b/readthedocs/builds/migrations/0002_build_command_initial.py
index 7b45f946830..d78b9d13d2f 100644
--- a/readthedocs/builds/migrations/0002_build_command_initial.py
+++ b/readthedocs/builds/migrations/0002_build_command_initial.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from django.db import migrations, models
-from __future__ import absolute_import
-from django.db import models, migrations
import readthedocs.builds.models
diff --git a/readthedocs/builds/migrations/0003_add-cold-storage.py b/readthedocs/builds/migrations/0003_add-cold-storage.py
index 7c474a973c0..2c53cc144dd 100644
--- a/readthedocs/builds/migrations/0003_add-cold-storage.py
+++ b/readthedocs/builds/migrations/0003_add-cold-storage.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-09 20:14
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/builds/migrations/0004_add-apiversion-proxy-model.py b/readthedocs/builds/migrations/0004_add-apiversion-proxy-model.py
index b96db28e95a..8247d9a0249 100644
--- a/readthedocs/builds/migrations/0004_add-apiversion-proxy-model.py
+++ b/readthedocs/builds/migrations/0004_add-apiversion-proxy-model.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-27 00:17
-from __future__ import unicode_literals
-
from django.db import migrations
diff --git a/readthedocs/builds/migrations/0005_remove-version-alias.py b/readthedocs/builds/migrations/0005_remove-version-alias.py
index a41af51e2df..65f6aadd3a5 100644
--- a/readthedocs/builds/migrations/0005_remove-version-alias.py
+++ b/readthedocs/builds/migrations/0005_remove-version-alias.py
@@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-10-17 04:20
-from __future__ import unicode_literals
-
from django.db import migrations, models
+
import readthedocs.builds.version_slug
diff --git a/readthedocs/builds/migrations/0006_add_config_field.py b/readthedocs/builds/migrations/0006_add_config_field.py
index 7af36e8ad79..deb3700278c 100644
--- a/readthedocs/builds/migrations/0006_add_config_field.py
+++ b/readthedocs/builds/migrations/0006_add_config_field.py
@@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-02 13:24
-from __future__ import unicode_literals
-
-from django.db import migrations
import jsonfield.fields
+from django.db import migrations
class Migration(migrations.Migration):
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py
index 70ac5ed19da..e44d426b381 100644
--- a/readthedocs/builds/models.py
+++ b/readthedocs/builds/models.py
@@ -1,21 +1,15 @@
# -*- coding: utf-8 -*-
-"""Models for the builds app."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Models for the builds app."""
import logging
import os.path
import re
from shutil import rmtree
-from builtins import object
from django.conf import settings
from django.db import models
+from django.urls import reverse
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext
@@ -23,7 +17,6 @@
from guardian.shortcuts import assign
from jsonfield import JSONField
from taggit.managers import TaggableManager
-from django.urls import reverse
from readthedocs.core.utils import broadcast
from readthedocs.projects.constants import (
@@ -55,8 +48,12 @@
)
from .version_slug import VersionSlugField
+
DEFAULT_VERSION_PRIVACY_LEVEL = getattr(
- settings, 'DEFAULT_VERSION_PRIVACY_LEVEL', 'public')
+ settings,
+ 'DEFAULT_VERSION_PRIVACY_LEVEL',
+ 'public',
+)
log = logging.getLogger(__name__)
@@ -96,7 +93,10 @@ class Version(models.Model):
#: filesystem to determine how the paths for this version are called. It
#: must not be used for any other identifying purposes.
slug = VersionSlugField(
- _('Slug'), max_length=255, populate_from='verbose_name')
+ _('Slug'),
+ max_length=255,
+ populate_from='verbose_name',
+ )
supported = models.BooleanField(_('Supported'), default=True)
active = models.BooleanField(_('Active'), default=False)
@@ -114,13 +114,14 @@ class Version(models.Model):
objects = VersionManager.from_queryset(VersionQuerySet)()
- class Meta(object):
+ class Meta:
unique_together = [('project', 'slug')]
ordering = ['-verbose_name']
permissions = (
# Translators: Permission around whether a user can view the
# version
- ('view_version', _('View Version')),)
+ ('view_version', _('View Version')),
+ )
def __str__(self):
return ugettext(
@@ -128,7 +129,8 @@ def __str__(self):
version=self.verbose_name,
project=self.project,
pk=self.pk,
- ))
+ ),
+ )
@property
def config(self):
@@ -139,9 +141,10 @@ def config(self):
:rtype: dict
"""
last_build = (
- self.builds.filter(state='finished', success=True)
- .order_by('-date')
- .first()
+ self.builds.filter(
+ state='finished',
+ success=True,
+ ).order_by('-date').first()
)
return last_build.config
@@ -184,7 +187,9 @@ def commit_name(self):
# If we came that far it's not a special version nor a branch or tag.
# Therefore just return the identifier to make a safe guess.
- log.debug('TODO: Raise an exception here. Testing what cases it happens')
+ log.debug(
+ 'TODO: Raise an exception here. Testing what cases it happens',
+ )
return self.identifier
def get_absolute_url(self):
@@ -198,16 +203,21 @@ def get_absolute_url(self):
)
private = self.privacy_level == PRIVATE
return self.project.get_docs_url(
- version_slug=self.slug, private=private)
+ version_slug=self.slug,
+ private=private,
+ )
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
"""Add permissions to the Version for all owners on save."""
from readthedocs.projects import tasks
- obj = super(Version, self).save(*args, **kwargs)
+ obj = super().save(*args, **kwargs)
for owner in self.project.users.all():
assign('view_version', owner, self)
broadcast(
- type='app', task=tasks.symlink_project, args=[self.project.pk])
+ type='app',
+ task=tasks.symlink_project,
+ args=[self.project.pk],
+ )
return obj
def delete(self, *args, **kwargs): # pylint: disable=arguments-differ
@@ -219,7 +229,7 @@ def delete(self, *args, **kwargs): # pylint: disable=arguments-differ
args=[self.get_artifact_paths()],
)
project_pk = self.project.pk
- super(Version, self).delete(*args, **kwargs)
+ super().delete(*args, **kwargs)
broadcast(
type='app',
task=tasks.symlink_project,
@@ -253,19 +263,27 @@ def get_downloads(self, pretty=False):
data['PDF'] = project.get_production_media_url('pdf', self.slug)
if project.has_htmlzip(self.slug):
data['HTML'] = project.get_production_media_url(
- 'htmlzip', self.slug)
+ 'htmlzip',
+ self.slug,
+ )
if project.has_epub(self.slug):
data['Epub'] = project.get_production_media_url(
- 'epub', self.slug)
+ 'epub',
+ self.slug,
+ )
else:
if project.has_pdf(self.slug):
data['pdf'] = project.get_production_media_url('pdf', self.slug)
if project.has_htmlzip(self.slug):
data['htmlzip'] = project.get_production_media_url(
- 'htmlzip', self.slug)
+ 'htmlzip',
+ self.slug,
+ )
if project.has_epub(self.slug):
data['epub'] = project.get_production_media_url(
- 'epub', self.slug)
+ 'epub',
+ self.slug,
+ )
return data
def get_conf_py_path(self):
@@ -291,9 +309,8 @@ def get_artifact_paths(self):
for type_ in ('pdf', 'epub', 'htmlzip'):
paths.append(
- self.project.get_production_media_path(
- type_=type_,
- version_slug=self.slug),
+ self.project
+ .get_production_media_path(type_=type_, version_slug=self.slug),
)
paths.append(self.project.rtd_build_path(version=self.slug))
@@ -315,7 +332,12 @@ def clean_build_path(self):
log.exception('Build path cleanup failed')
def get_github_url(
- self, docroot, filename, source_suffix='.rst', action='view'):
+ self,
+ docroot,
+ filename,
+ source_suffix='.rst',
+ action='view',
+ ):
"""
Return a GitHub URL for a given filename.
@@ -357,7 +379,12 @@ def get_github_url(
)
def get_gitlab_url(
- self, docroot, filename, source_suffix='.rst', action='view'):
+ self,
+ docroot,
+ filename,
+ source_suffix='.rst',
+ action='view',
+ ):
repo_url = self.project.repo
if 'gitlab' not in repo_url:
return ''
@@ -442,7 +469,7 @@ def __init__(self, *args, **kwargs):
del kwargs[key]
except KeyError:
pass
- super(APIVersion, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def save(self, *args, **kwargs):
return 0
@@ -454,13 +481,28 @@ class Build(models.Model):
"""Build data."""
project = models.ForeignKey(
- Project, verbose_name=_('Project'), related_name='builds')
+ Project,
+ verbose_name=_('Project'),
+ related_name='builds',
+ )
version = models.ForeignKey(
- Version, verbose_name=_('Version'), null=True, related_name='builds')
+ Version,
+ verbose_name=_('Version'),
+ null=True,
+ related_name='builds',
+ )
type = models.CharField(
- _('Type'), max_length=55, choices=BUILD_TYPES, default='html')
+ _('Type'),
+ max_length=55,
+ choices=BUILD_TYPES,
+ default='html',
+ )
state = models.CharField(
- _('State'), max_length=55, choices=BUILD_STATE, default='finished')
+ _('State'),
+ max_length=55,
+ choices=BUILD_STATE,
+ default='finished',
+ )
date = models.DateTimeField(_('Date'), auto_now_add=True)
success = models.BooleanField(_('Success'), default=True)
@@ -470,16 +512,26 @@ class Build(models.Model):
error = models.TextField(_('Error'), default='', blank=True)
exit_code = models.IntegerField(_('Exit code'), null=True, blank=True)
commit = models.CharField(
- _('Commit'), max_length=255, null=True, blank=True)
+ _('Commit'),
+ max_length=255,
+ null=True,
+ blank=True,
+ )
_config = JSONField(_('Configuration used in the build'), default=dict)
length = models.IntegerField(_('Build Length'), null=True, blank=True)
builder = models.CharField(
- _('Builder'), max_length=255, null=True, blank=True)
+ _('Builder'),
+ max_length=255,
+ null=True,
+ blank=True,
+ )
cold_storage = models.NullBooleanField(
- _('Cold Storage'), help_text='Build steps stored outside the database.')
+ _('Cold Storage'),
+ help_text='Build steps stored outside the database.',
+ )
# Manager
@@ -487,13 +539,13 @@ class Build(models.Model):
CONFIG_KEY = '__config'
- class Meta(object):
+ class Meta:
ordering = ['-date']
get_latest_by = 'date'
index_together = [['version', 'state', 'type']]
def __init__(self, *args, **kwargs):
- super(Build, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._config_changed = False
@property
@@ -506,14 +558,11 @@ def previous(self):
date = self.date or timezone.now()
if self.project is not None and self.version is not None:
return (
- Build.objects
- .filter(
+ Build.objects.filter(
project=self.project,
version=self.version,
date__lt=date,
- )
- .order_by('-date')
- .first()
+ ).order_by('-date').first()
)
return None
@@ -523,9 +572,9 @@ def config(self):
Get the config used for this build.
Since we are saving the config into the JSON field only when it differs
- from the previous one, this helper returns the correct JSON used in
- this Build object (it could be stored in this object or one of the
- previous ones).
+ from the previous one, this helper returns the correct JSON used in this
+ Build object (it could be stored in this object or one of the previous
+ ones).
"""
if self.CONFIG_KEY in self._config:
return Build.objects.get(pk=self._config[self.CONFIG_KEY])._config
@@ -553,11 +602,13 @@ def save(self, *args, **kwargs): # noqa
"""
if self.pk is None or self._config_changed:
previous = self.previous
- if (previous is not None and
- self._config and self._config == previous.config):
+ if (
+ previous is not None and self._config and
+ self._config == previous.config
+ ):
previous_pk = previous._config.get(self.CONFIG_KEY, previous.pk)
self._config = {self.CONFIG_KEY: previous_pk}
- super(Build, self).save(*args, **kwargs)
+ super().save(*args, **kwargs)
self._config_changed = False
def __str__(self):
@@ -568,7 +619,8 @@ def __str__(self):
self.project.users.all().values_list('username', flat=True),
),
pk=self.pk,
- ))
+ ),
+ )
def get_absolute_url(self):
return reverse('builds_detail', args=[self.project.slug, self.pk])
@@ -579,7 +631,7 @@ def finished(self):
return self.state == BUILD_STATE_FINISHED
-class BuildCommandResultMixin(object):
+class BuildCommandResultMixin:
"""
Mixin for common command result methods/properties.
@@ -609,7 +661,10 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
"""Build command for a ``Build``."""
build = models.ForeignKey(
- Build, verbose_name=_('Build'), related_name='commands')
+ Build,
+ verbose_name=_('Build'),
+ related_name='commands',
+ )
command = models.TextField(_('Command'))
description = models.TextField(_('Description'), blank=True)
@@ -619,7 +674,7 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
start_time = models.DateTimeField(_('Start time'))
end_time = models.DateTimeField(_('End time'))
- class Meta(object):
+ class Meta:
ordering = ['start_time']
get_latest_by = 'start_time'
@@ -628,7 +683,8 @@ class Meta(object):
def __str__(self):
return (
ugettext('Build command {pk} for build {build}')
- .format(pk=self.pk, build=self.build))
+ .format(pk=self.pk, build=self.build)
+ )
@property
def run_time(self):
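
The Build.config and Build.save() hunks above store a build's configuration only when it differs from the previous build's; otherwise they save a pointer of the form {'__config': <pk of the build holding the real config>}. A reduced sketch of that deduplication outside the ORM (a dict stands in for the Build table; the real code resolves the pointer with Build.objects.get):

CONFIG_KEY = '__config'
store = {}  # pk -> stored _config dict

def save_config(pk, config, previous_pk=None):
    """Store config, or only a reference if it matches the previous build's."""
    if previous_pk is not None and resolve_config(previous_pk) == config:
        # Point at the build that actually holds the config, following existing pointers.
        target = store[previous_pk].get(CONFIG_KEY, previous_pk)
        store[pk] = {CONFIG_KEY: target}
    else:
        store[pk] = config

def resolve_config(pk):
    """Return the effective config, following the pointer if present."""
    data = store[pk]
    if CONFIG_KEY in data:
        return store[data[CONFIG_KEY]]
    return data

save_config(1, {'version': 2, 'formats': ['pdf']})
save_config(2, {'version': 2, 'formats': ['pdf']}, previous_pk=1)
save_config(3, {'version': 2, 'formats': []}, previous_pk=2)
assert store[2] == {CONFIG_KEY: 1}                  # second build stores only a pointer
assert resolve_config(2) == resolve_config(1)       # but resolves to the same config
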
diff --git a/readthedocs/builds/querysets.py b/readthedocs/builds/querysets.py
index 34408b9d982..ae7e5a8fb79 100644
--- a/readthedocs/builds/querysets.py
+++ b/readthedocs/builds/querysets.py
@@ -1,6 +1,6 @@
-"""Build and Version QuerySet classes"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Build and Version QuerySet classes."""
from django.db import models
from guardian.shortcuts import get_objects_for_user
@@ -37,7 +37,9 @@ def public(self, user=None, project=None, only_active=True):
return queryset
def protected(self, user=None, project=None, only_active=True):
- queryset = self.filter(privacy_level__in=[constants.PUBLIC, constants.PROTECTED])
+ queryset = self.filter(
+ privacy_level__in=[constants.PUBLIC, constants.PROTECTED],
+ )
if user:
queryset = self._add_user_repos(queryset, user)
if project:
@@ -60,10 +62,10 @@ def api(self, user=None):
return self.public(user, only_active=False)
def for_project(self, project):
- """Return all versions for a project, including translations"""
+ """Return all versions for a project, including translations."""
return self.filter(
models.Q(project=project) |
- models.Q(project__main_language_project=project)
+ models.Q(project__main_language_project=project),
)
@@ -119,8 +121,7 @@ def _add_user_repos(self, queryset, user=None):
if user.is_authenticated:
user_queryset = get_objects_for_user(user, 'builds.view_version')
pks = user_queryset.values_list('pk', flat=True)
- queryset = self.filter(
- build__version__pk__in=pks) | queryset
+ queryset = self.filter(build__version__pk__in=pks,) | queryset
return queryset.distinct()
def public(self, user=None, project=None):
diff --git a/readthedocs/builds/signals.py b/readthedocs/builds/signals.py
index c80f7d3e42d..df87930f9b7 100644
--- a/readthedocs/builds/signals.py
+++ b/readthedocs/builds/signals.py
@@ -1,6 +1,7 @@
-"""Build signals"""
+# -*- coding: utf-8 -*-
+
+"""Build signals."""
-from __future__ import absolute_import
import django.dispatch
diff --git a/readthedocs/builds/syncers.py b/readthedocs/builds/syncers.py
index 0ac0c6dedf6..6b7b1d337bf 100644
--- a/readthedocs/builds/syncers.py
+++ b/readthedocs/builds/syncers.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Classes to copy files between build and web servers.
@@ -5,26 +7,23 @@
local machine.
"""
-from __future__ import absolute_import
-
import getpass
import logging
import os
import shutil
-from builtins import object
from django.conf import settings
-from readthedocs.core.utils.extend import SettingsOverrideObject
from readthedocs.core.utils import safe_makedirs
+from readthedocs.core.utils.extend import SettingsOverrideObject
log = logging.getLogger(__name__)
-class BaseSyncer(object):
+class BaseSyncer:
- """A base object for syncers and pullers"""
+ """A base object for syncers and pullers."""
@classmethod
def copy(cls, path, target, is_file=False, **kwargs):
@@ -36,7 +35,7 @@ class LocalSyncer(BaseSyncer):
@classmethod
def copy(cls, path, target, is_file=False, **kwargs):
"""A copy command that works with files or directories."""
- log.info("Local Copy %s to %s", path, target)
+ log.info('Local Copy %s to %s', path, target)
if is_file:
if path == target:
# Don't copy the same file over itself
@@ -62,28 +61,31 @@ def copy(cls, path, target, is_file=False, **kwargs):
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
app_servers = getattr(settings, 'MULTIPLE_APP_SERVERS', [])
if app_servers:
- log.info("Remote Copy %s to %s on %s", path, target, app_servers)
+ log.info('Remote Copy %s to %s on %s', path, target, app_servers)
for server in app_servers:
- mkdir_cmd = ("ssh %s@%s mkdir -p %s" % (sync_user, server, target))
+ mkdir_cmd = (
+ 'ssh {}@{} mkdir -p {}'.format(sync_user, server, target)
+ )
ret = os.system(mkdir_cmd)
if ret != 0:
- log.debug("Copy error to app servers: cmd=%s", mkdir_cmd)
+ log.debug('Copy error to app servers: cmd=%s', mkdir_cmd)
if is_file:
- slash = ""
+ slash = ''
else:
- slash = "/"
+ slash = '/'
# Add a slash when copying directories
sync_cmd = (
- "rsync -e 'ssh -T' -av --delete {path}{slash} {user}@{server}:{target}"
- .format(
+ "rsync -e 'ssh -T' -av --delete {path}{slash} {user}@{server}:{target}".format(
path=path,
slash=slash,
user=sync_user,
server=server,
- target=target))
+ target=target,
+ )
+ )
ret = os.system(sync_cmd)
if ret != 0:
- log.debug("Copy error to app servers: cmd=%s", sync_cmd)
+ log.debug('Copy error to app servers: cmd=%s', sync_cmd)
class DoubleRemotePuller(BaseSyncer):
@@ -98,29 +100,32 @@ def copy(cls, path, target, host, is_file=False, **kwargs): # pylint: disable=a
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
app_servers = getattr(settings, 'MULTIPLE_APP_SERVERS', [])
if not is_file:
- path += "/"
- log.info("Remote Copy %s to %s", path, target)
+ path += '/'
+ log.info('Remote Copy %s to %s', path, target)
for server in app_servers:
if not is_file:
- mkdir_cmd = "ssh {user}@{server} mkdir -p {target}".format(
- user=sync_user, server=server, target=target
+ mkdir_cmd = 'ssh {user}@{server} mkdir -p {target}'.format(
+ user=sync_user,
+ server=server,
+ target=target,
)
ret = os.system(mkdir_cmd)
if ret != 0:
- log.debug("MkDir error to app servers: cmd=%s", mkdir_cmd)
+ log.debug('MkDir error to app servers: cmd=%s', mkdir_cmd)
# Add a slash when copying directories
sync_cmd = (
"ssh {user}@{server} 'rsync -av "
- "--delete --exclude projects {user}@{host}:{path} {target}'"
- .format(
+ "--delete --exclude projects {user}@{host}:{path} {target}'".format(
host=host,
path=path,
user=sync_user,
server=server,
- target=target))
+ target=target,
+ )
+ )
ret = os.system(sync_cmd)
if ret != 0:
- log.debug("Copy error to app servers: cmd=%s", sync_cmd)
+ log.debug('Copy error to app servers: cmd=%s', sync_cmd)
class RemotePuller(BaseSyncer):
@@ -134,8 +139,8 @@ def copy(cls, path, target, host, is_file=False, **kwargs): # pylint: disable=a
"""
sync_user = getattr(settings, 'SYNC_USER', getpass.getuser())
if not is_file:
- path += "/"
- log.info("Remote Pull %s to %s", path, target)
+ path += '/'
+ log.info('Remote Pull %s to %s', path, target)
if not is_file and not os.path.exists(target):
safe_makedirs(target)
# Add a slash when copying directories
@@ -148,7 +153,7 @@ def copy(cls, path, target, host, is_file=False, **kwargs): # pylint: disable=a
ret = os.system(sync_cmd)
if ret != 0:
log.debug(
- "Copy error to app servers. Command: [%s] Return: [%s]",
+ 'Copy error to app servers. Command: [%s] Return: [%s]',
sync_cmd,
ret,
)
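
The syncer hunks above mostly reformat how the ssh/rsync shell commands are assembled with str.format() before being handed to os.system(). A standalone sketch of the command construction for the directory case (the user, host, and paths below are made up):

def build_sync_command(path, target, user, server, is_file=False):
    """Mirror how RemoteSyncer assembles its rsync invocation (illustrative only)."""
    # Directories get a trailing slash so rsync copies the contents, not the directory.
    slash = '' if is_file else '/'
    return (
        "rsync -e 'ssh -T' -av --delete {path}{slash} {user}@{server}:{target}".format(
            path=path,
            slash=slash,
            user=user,
            server=server,
            target=target,
        )
    )

print(build_sync_command('/home/docs/checkouts/html', '/var/www/html',
                         user='syncuser', server='web01'))
# rsync -e 'ssh -T' -av --delete /home/docs/checkouts/html/ syncuser@web01:/var/www/html
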
diff --git a/readthedocs/builds/urls.py b/readthedocs/builds/urls.py
index a148362a51b..9c421662047 100644
--- a/readthedocs/builds/urls.py
+++ b/readthedocs/builds/urls.py
@@ -1,12 +1,11 @@
# -*- coding: utf-8 -*-
-"""URL configuration for builds app."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""URL configuration for builds app."""
from django.conf.urls import url
from .views import builds_redirect_detail, builds_redirect_list
+
urlpatterns = [
url(
r'^(?P<project_slug>[-\w]+)/(?P<pk>\d+)/$',
diff --git a/readthedocs/builds/utils.py b/readthedocs/builds/utils.py
index 7fcc245dbb8..e0025dc77ae 100644
--- a/readthedocs/builds/utils.py
+++ b/readthedocs/builds/utils.py
@@ -1,11 +1,12 @@
# -*- coding: utf-8 -*-
-"""Utilities for the builds app."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Utilities for the builds app."""
from readthedocs.projects.constants import (
- BITBUCKET_REGEXS, GITHUB_REGEXS, GITLAB_REGEXS)
+ BITBUCKET_REGEXS,
+ GITHUB_REGEXS,
+ GITLAB_REGEXS,
+)
def get_github_username_repo(url):
diff --git a/readthedocs/builds/version_slug.py b/readthedocs/builds/version_slug.py
index 61622369da7..b7f88cd585f 100644
--- a/readthedocs/builds/version_slug.py
+++ b/readthedocs/builds/version_slug.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Contains logic for handling version slugs.
@@ -17,8 +19,6 @@
another number would be confusing.
"""
-from __future__ import absolute_import
-
import math
import re
import string
@@ -26,7 +26,6 @@
from django.db import models
from django.utils.encoding import force_text
-from builtins import range
def get_fields_with_model(cls):
@@ -37,12 +36,10 @@ def get_fields_with_model(cls):
prescrived in the Django docs.
https://docs.djangoproject.com/en/1.11/ref/models/meta/#migrating-from-the-old-api
"""
- return [
- (f, f.model if f.model != cls else None)
- for f in cls._meta.get_fields()
- if not f.is_relation or f.one_to_one or
- (f.many_to_one and f.related_model)
- ]
+ return [(f, f.model if f.model != cls else None)
+ for f in cls._meta.get_fields()
+ if not f.is_relation or f.one_to_one or
+ (f.many_to_one and f.related_model)]
# Regex breakdown:
@@ -72,7 +69,7 @@ def __init__(self, *args, **kwargs):
raise ValueError("missing 'populate_from' argument")
else:
self._populate_from = populate_from
- super(VersionSlugField, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def get_queryset(self, model_cls, slug_field):
# pylint: disable=protected-access
@@ -168,7 +165,8 @@ def create_slug(self, model_instance):
count += 1
assert self.test_pattern.match(slug), (
- 'Invalid generated slug: {slug}'.format(slug=slug))
+ 'Invalid generated slug: {slug}'.format(slug=slug)
+ )
return slug
def pre_save(self, model_instance, add):
@@ -180,6 +178,6 @@ def pre_save(self, model_instance, add):
return value
def deconstruct(self):
- name, path, args, kwargs = super(VersionSlugField, self).deconstruct()
+ name, path, args, kwargs = super().deconstruct()
kwargs['populate_from'] = self._populate_from
return name, path, args, kwargs
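
VersionSlugField.create_slug(), touched above, slugifies the version's verbose_name and then appends a uniquifying suffix until no collision remains, finally asserting the result against test_pattern. A simplified, self-contained sketch of that loop (the suffix scheme and regex here are stand-ins, and an in-memory set replaces the queryset lookup):

import re

existing = {'latest', 'stable', 'v1-0'}          # slugs already taken
test_pattern = re.compile('^[a-z0-9-][-._a-z0-9]*$')

def create_slug(verbose_name):
    """Simplified sketch of VersionSlugField.create_slug: slugify, then de-duplicate."""
    base = re.sub('[^-._a-z0-9]', '-', verbose_name.lower()).strip('-') or 'unknown'
    slug, count = base, 0
    while slug in existing:                      # the real field queries the model instead
        count += 1
        slug = '{}_{}'.format(base, count)
    assert test_pattern.match(slug), 'Invalid generated slug: {}'.format(slug)
    return slug

print(create_slug('V1.0'))      # v1.0
print(create_slug('latest'))    # latest_1
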
diff --git a/readthedocs/builds/views.py b/readthedocs/builds/views.py
index 7cbe310fbb5..e86a673dc27 100644
--- a/readthedocs/builds/views.py
+++ b/readthedocs/builds/views.py
@@ -2,16 +2,8 @@
"""Views for builds app."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
-from builtins import object
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import (
@@ -33,7 +25,7 @@
log = logging.getLogger(__name__)
-class BuildBase(object):
+class BuildBase:
model = Build
def get_queryset(self):
@@ -43,13 +35,14 @@ def get_queryset(self):
slug=self.project_slug,
)
queryset = Build.objects.public(
- user=self.request.user, project=self.project
+ user=self.request.user,
+ project=self.project,
)
return queryset
-class BuildTriggerMixin(object):
+class BuildTriggerMixin:
@method_decorator(login_required)
def post(self, request, project_slug):
@@ -65,7 +58,10 @@ def post(self, request, project_slug):
slug=version_slug,
)
- update_docs_task, build = trigger_build(project=project, version=version)
+ update_docs_task, build = trigger_build(
+ project=project,
+ version=version,
+ )
if (update_docs_task, build) == (None, None):
# Build was skipped
messages.add_message(
@@ -85,15 +81,17 @@ def post(self, request, project_slug):
class BuildList(BuildBase, BuildTriggerMixin, ListView):
def get_context_data(self, **kwargs):
- context = super(BuildList, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
- active_builds = self.get_queryset().exclude(state='finished'
- ).values('id')
+ active_builds = self.get_queryset().exclude(
+ state='finished',
+ ).values('id')
context['project'] = self.project
context['active_builds'] = active_builds
context['versions'] = Version.objects.public(
- user=self.request.user, project=self.project
+ user=self.request.user,
+ project=self.project,
)
context['build_qs'] = self.get_queryset()
@@ -104,7 +102,7 @@ class BuildDetail(BuildBase, DetailView):
pk_url_kwarg = 'build_pk'
def get_context_data(self, **kwargs):
- context = super(BuildDetail, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['project'] = self.project
return context
@@ -114,11 +112,11 @@ def get_context_data(self, **kwargs):
def builds_redirect_list(request, project_slug): # pylint: disable=unused-argument
return HttpResponsePermanentRedirect(
- reverse('builds_project_list', args=[project_slug])
+ reverse('builds_project_list', args=[project_slug]),
)
def builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument
return HttpResponsePermanentRedirect(
- reverse('builds_detail', args=[project_slug, pk])
+ reverse('builds_detail', args=[project_slug, pk]),
)
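
The view changes above are purely syntactic: classes drop the explicit object base and super() loses its arguments. A tiny, generic illustration of the equivalence (names are made up):

    class BaseSketch:
        def get_queryset(self):
            return []

    class ChildSketch(BaseSketch):
        def get_queryset(self):
            # Argument-less super() resolves the same MRO as the old
            # super(ChildSketch, self) spelling.
            return list(super().get_queryset())

    assert ChildSketch().get_queryset() == []
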
diff --git a/readthedocs/config/__init__.py b/readthedocs/config/__init__.py
index 314f6c394cc..23006b4c46e 100644
--- a/readthedocs/config/__init__.py
+++ b/readthedocs/config/__init__.py
@@ -1,2 +1,5 @@
+# -*- coding: utf-8 -*-
+
+"""Logic to parse and validate ``readthedocs.yaml`` file."""
from .config import * # noqa
from .parser import * # noqa
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py
index 223bd7b0a0b..e493d09db22 100644
--- a/readthedocs/config/config.py
+++ b/readthedocs/config/config.py
@@ -3,13 +3,9 @@
# pylint: disable=too-many-lines
"""Build configuration for rtd."""
-from __future__ import division, print_function, unicode_literals
-
import os
from contextlib import contextmanager
-import six
-
from readthedocs.projects.constants import DOCUMENTATION_CHOICES
from .find import find_one
@@ -26,6 +22,7 @@
validate_string,
)
+
__all__ = (
'ALL',
'load',
@@ -79,7 +76,7 @@ class ConfigError(Exception):
def __init__(self, message, code):
self.code = code
- super(ConfigError, self).__init__(message)
+ super().__init__(message)
class ConfigOptionNotSupportedError(ConfigError):
@@ -91,9 +88,9 @@ def __init__(self, configuration):
template = (
'The "{}" configuration option is not supported in this version'
)
- super(ConfigOptionNotSupportedError, self).__init__(
+ super().__init__(
template.format(self.configuration),
- CONFIG_NOT_SUPPORTED
+ CONFIG_NOT_SUPPORTED,
)
@@ -112,10 +109,10 @@ def __init__(self, key, code, error_message, source_file=None):
code=code,
error=error_message,
)
- super(InvalidConfig, self).__init__(message, code=code)
+ super().__init__(message, code=code)
-class BuildConfigBase(object):
+class BuildConfigBase:
"""
Config that handles the build of one particular documentation.
@@ -134,9 +131,15 @@ class BuildConfigBase(object):
"""
PUBLIC_ATTRIBUTES = [
- 'version', 'formats', 'python',
- 'conda', 'build', 'doctype',
- 'sphinx', 'mkdocs', 'submodules',
+ 'version',
+ 'formats',
+ 'python',
+ 'conda',
+ 'build',
+ 'doctype',
+ 'sphinx',
+ 'mkdocs',
+ 'submodules',
]
version = None
@@ -223,7 +226,7 @@ def validate(self):
@property
def python_interpreter(self):
ver = self.python_full_version
- return 'python{0}'.format(ver)
+ return 'python{}'.format(ver)
@property
def python_full_version(self):
@@ -340,13 +343,11 @@ def validate_build(self):
# Prepend proper image name to user's image name
build['image'] = '{}:{}'.format(
DOCKER_DEFAULT_IMAGE,
- build['image']
+ build['image'],
)
# Update docker default settings from image name
if build['image'] in DOCKER_IMAGE_SETTINGS:
- self.env_config.update(
- DOCKER_IMAGE_SETTINGS[build['image']]
- )
+ self.env_config.update(DOCKER_IMAGE_SETTINGS[build['image']])
# Allow to override specific project
config_image = self.defaults.get('build_image')
@@ -373,20 +374,22 @@ def validate_python(self):
self.error(
'python',
self.PYTHON_INVALID_MESSAGE,
- code=PYTHON_INVALID)
+ code=PYTHON_INVALID,
+ )
# Validate use_system_site_packages.
if 'use_system_site_packages' in raw_python:
- with self.catch_validation_error(
- 'python.use_system_site_packages'):
+ with self.catch_validation_error('python.use_system_site_packages'):
python['use_system_site_packages'] = validate_bool(
- raw_python['use_system_site_packages'])
+ raw_python['use_system_site_packages'],
+ )
# Validate pip_install.
if 'pip_install' in raw_python:
with self.catch_validation_error('python.pip_install'):
python['install_with_pip'] = validate_bool(
- raw_python['pip_install'])
+ raw_python['pip_install'],
+ )
# Validate extra_requirements.
if 'extra_requirements' in raw_python:
@@ -395,29 +398,30 @@ def validate_python(self):
self.error(
'python.extra_requirements',
self.PYTHON_EXTRA_REQUIREMENTS_INVALID_MESSAGE,
- code=PYTHON_INVALID)
+ code=PYTHON_INVALID,
+ )
if not python['install_with_pip']:
python['extra_requirements'] = []
else:
for extra_name in raw_extra_requirements:
- with self.catch_validation_error(
- 'python.extra_requirements'):
+ with self.catch_validation_error('python.extra_requirements'):
python['extra_requirements'].append(
- validate_string(extra_name)
+ validate_string(extra_name),
)
# Validate setup_py_install.
if 'setup_py_install' in raw_python:
with self.catch_validation_error('python.setup_py_install'):
python['install_with_setup'] = validate_bool(
- raw_python['setup_py_install'])
+ raw_python['setup_py_install'],
+ )
if 'version' in raw_python:
with self.catch_validation_error('python.version'):
# Try to convert strings to an int first, to catch '2', then
# a float, to catch '2.7'
version = raw_python['version']
- if isinstance(version, six.string_types):
+ if isinstance(version, str):
try:
version = int(version)
except ValueError:
@@ -444,7 +448,8 @@ def validate_conda(self):
if 'file' in raw_conda:
with self.catch_validation_error('conda.file'):
conda_environment = validate_file(
- raw_conda['file'], self.base_path
+ raw_conda['file'],
+ self.base_path,
)
conda['environment'] = conda_environment
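
Each validate_* call above runs inside self.catch_validation_error(key), which converts a ValidationError into an InvalidConfig carrying the offending key. A rough, self-contained sketch of that pattern (simplified from the real classes, which also carry the source file and a fuller error message):

    from contextlib import contextmanager

    class ValidationError(Exception):
        def __init__(self, value, code):
            self.code = code
            super().__init__('invalid value: {!r}'.format(value))

    class InvalidConfig(Exception):
        def __init__(self, key, code):
            self.key = key
            self.code = code
            super().__init__('error in key "{}"'.format(key))

    class ConfigSketch:
        @contextmanager
        def catch_validation_error(self, key):
            # Re-raise validation failures tagged with the config key.
            try:
                yield
            except ValidationError as error:
                raise InvalidConfig(key, error.code) from error
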
@@ -650,7 +655,7 @@ def validate_python(self):
python = {}
with self.catch_validation_error('python.version'):
version = self.pop_config('python.version', 3)
- if isinstance(version, six.string_types):
+ if isinstance(version, str):
try:
version = int(version)
except ValueError:
@@ -682,7 +687,8 @@ def validate_python(self):
with self.catch_validation_error('python.extra_requirements'):
extra_requirements = self.pop_config(
- 'python.extra_requirements', []
+ 'python.extra_requirements',
+ [],
)
extra_requirements = validate_list(extra_requirements)
if extra_requirements and not python['install_with_pip']:
@@ -800,7 +806,8 @@ def validate_sphinx(self):
if not configuration:
configuration = None
configuration = self.pop_config(
- 'sphinx.configuration', configuration
+ 'sphinx.configuration',
+ configuration,
)
if configuration is not None:
configuration = validate_file(configuration, self.base_path)
@@ -816,9 +823,8 @@ def validate_final_doc_type(self):
"""
Validates that the doctype is the same as the admin panel.
- This a temporal validation, as the configuration file
- should support per version doctype, but we need to
- adapt the rtd code for that.
+    This is a temporary validation, as the configuration file should support
+    per version doctype, but we need to adapt the rtd code for that.
"""
dashboard_doctype = self.defaults.get('doctype', 'sphinx')
if self.doctype != dashboard_doctype:
@@ -828,7 +834,7 @@ def validate_final_doc_type(self):
if dashboard_doctype == 'mkdocs' or not self.sphinx:
error_msg += ' but there is no "{}" key specified.'.format(
- 'mkdocs' if dashboard_doctype == 'mkdocs' else 'sphinx'
+ 'mkdocs' if dashboard_doctype == 'mkdocs' else 'sphinx',
)
else:
error_msg += ' but your "sphinx.builder" key does not match.'
@@ -890,8 +896,8 @@ def validate_keys(self):
"""
Checks that we don't have extra keys (invalid ones).
- This should be called after all the validations are done
- and all keys are popped from `self.raw_config`.
+ This should be called after all the validations are done and all keys
+ are popped from `self.raw_config`.
"""
msg = (
'Invalid configuration option: {}. '
@@ -981,10 +987,7 @@ def load(path, env_config):
filename = find_one(path, CONFIG_FILENAME_REGEX)
if not filename:
- raise ConfigError(
- 'No configuration file found',
- code=CONFIG_REQUIRED
- )
+ raise ConfigError('No configuration file found', code=CONFIG_REQUIRED)
with open(filename, 'r') as configuration_file:
try:
config = parse(configuration_file.read())
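
The v1 and v2 validators above share the same coercion for python.version: try int() first to catch '2', then float() to catch '2.7', and leave anything else for validate_choice() to reject. Extracted into a standalone sketch:

    def coerce_python_version(version):
        """Mirror the int-then-float coercion used for python.version."""
        if isinstance(version, str):
            try:
                version = int(version)
            except ValueError:
                try:
                    version = float(version)
                except ValueError:
                    pass  # left as-is; validate_choice() rejects it later
        return version

    assert coerce_python_version('2') == 2
    assert coerce_python_version('2.7') == 2.7
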
diff --git a/readthedocs/config/find.py b/readthedocs/config/find.py
index cb3e5e0c56d..2fe5292c45e 100644
--- a/readthedocs/config/find.py
+++ b/readthedocs/config/find.py
@@ -1,6 +1,6 @@
-"""Helper functions to search files."""
+# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
+"""Helper functions to search files."""
import os
import re
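
find.py only loses its Python 2 header here; for context, find_one() returns the first file in a directory whose name matches a regex, which the test in test_find.py further down checks against os.path.abspath. A rough equivalent, assuming only the top level of the directory is searched:

    import os
    import re

    def find_one_sketch(path, filename_regex):
        # Return the absolute path of the first matching file, or None.
        for filename in sorted(os.listdir(path)):
            if re.match(filename_regex, filename):
                return os.path.abspath(os.path.join(path, filename))
        return None
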
diff --git a/readthedocs/config/models.py b/readthedocs/config/models.py
index bf12ddfa6d4..bc6f381e805 100644
--- a/readthedocs/config/models.py
+++ b/readthedocs/config/models.py
@@ -1,6 +1,6 @@
-"""Models for the response of the configuration object."""
+# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
+"""Models for the response of the configuration object."""
from collections import namedtuple
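
models.py builds its response objects on namedtuple; in miniature, the idea is (field names here are illustrative, not the project's actual definitions):

    from collections import namedtuple

    # Example fields only, not the project's real model definitions.
    PythonSketch = namedtuple('PythonSketch', ['version', 'install_with_pip'])

    python = PythonSketch(version=3.6, install_with_pip=False)
    assert python.version == 3.6
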
diff --git a/readthedocs/config/parser.py b/readthedocs/config/parser.py
index 655b1601bf7..376774d6144 100644
--- a/readthedocs/config/parser.py
+++ b/readthedocs/config/parser.py
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
-"""YAML parser for the RTD configuration file."""
-from __future__ import division, print_function, unicode_literals
+"""YAML parser for the RTD configuration file."""
import yaml
+
__all__ = ('parse', 'ParseError')
@@ -12,8 +12,6 @@ class ParseError(Exception):
"""Parser related errors."""
- pass
-
def parse(stream):
"""
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py
index 593453d8cc4..98e579b1ceb 100644
--- a/readthedocs/config/tests/test_config.py
+++ b/readthedocs/config/tests/test_config.py
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
import os
import re
import textwrap
@@ -38,6 +36,7 @@
from .utils import apply_fs
+
yaml_config_dir = {
'readthedocs.yml': textwrap.dedent(
'''
@@ -56,13 +55,15 @@ def get_build_config(config, env_config=None, source_file='readthedocs.yml'):
)
-@pytest.mark.parametrize('files', [
- {'readthedocs.ymlmore': ''}, {'first': {'readthedocs.yml': ''}},
- {'startreadthedocs.yml': ''}, {'second': {'confuser.txt': 'content'}},
- {'noroot': {'readthedocs.ymlmore': ''}}, {'third': {'readthedocs.yml': 'content', 'Makefile': ''}},
- {'noroot': {'startreadthedocs.yml': ''}}, {'fourth': {'samplefile.yaml': 'content'}},
- {'readthebots.yaml': ''}, {'fifth': {'confuser.txt': '', 'readthedocs.yml': 'content'}},
-])
+@pytest.mark.parametrize(
+ 'files', [
+ {'readthedocs.ymlmore': ''}, {'first': {'readthedocs.yml': ''}},
+ {'startreadthedocs.yml': ''}, {'second': {'confuser.txt': 'content'}},
+ {'noroot': {'readthedocs.ymlmore': ''}}, {'third': {'readthedocs.yml': 'content', 'Makefile': ''}},
+ {'noroot': {'startreadthedocs.yml': ''}}, {'fourth': {'samplefile.yaml': 'content'}},
+ {'readthebots.yaml': ''}, {'fifth': {'confuser.txt': '', 'readthedocs.yml': 'content'}},
+ ],
+)
def test_load_no_config_file(tmpdir, files):
apply_fs(tmpdir, files)
base = str(tmpdir)
@@ -72,9 +73,11 @@ def test_load_no_config_file(tmpdir, files):
def test_load_empty_config_file(tmpdir):
- apply_fs(tmpdir, {
- 'readthedocs.yml': ''
- })
+ apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': '',
+ },
+ )
base = str(tmpdir)
with raises(ConfigError):
load(base, {})
@@ -88,33 +91,39 @@ def test_minimal_config(tmpdir):
def test_load_version1(tmpdir):
- apply_fs(tmpdir, {
- 'readthedocs.yml': textwrap.dedent('''
+ apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': textwrap.dedent('''
version: 1
- ''')
- })
+ '''),
+ },
+ )
base = str(tmpdir)
build = load(base, {'allow_v2': True})
assert isinstance(build, BuildConfigV1)
def test_load_version2(tmpdir):
- apply_fs(tmpdir, {
- 'readthedocs.yml': textwrap.dedent('''
+ apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': textwrap.dedent('''
version: 2
- ''')
- })
+ '''),
+ },
+ )
base = str(tmpdir)
build = load(base, {'allow_v2': True})
assert isinstance(build, BuildConfigV2)
def test_load_unknow_version(tmpdir):
- apply_fs(tmpdir, {
- 'readthedocs.yml': textwrap.dedent('''
+ apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': textwrap.dedent('''
version: 9
- ''')
- })
+ '''),
+ },
+ )
base = str(tmpdir)
with raises(ConfigError) as excinfo:
load(base, {'allow_v2': True})
@@ -122,15 +131,17 @@ def test_load_unknow_version(tmpdir):
def test_yaml_extension(tmpdir):
- """Make sure it's capable of loading the 'readthedocs' file with a 'yaml' extension."""
- apply_fs(tmpdir, {
- 'readthedocs.yaml': textwrap.dedent(
- '''
+ """Make sure loading the 'readthedocs' file with a 'yaml' extension."""
+ apply_fs(
+ tmpdir, {
+ 'readthedocs.yaml': textwrap.dedent(
+ '''
python:
version: 3
'''
- ),
- })
+ ),
+ },
+ )
base = str(tmpdir)
config = load(base, {})
assert isinstance(config, BuildConfigV1)
@@ -143,13 +154,15 @@ def test_build_config_has_source_file(tmpdir):
def test_build_config_has_list_with_single_empty_value(tmpdir):
- base = str(apply_fs(tmpdir, {
- 'readthedocs.yml': textwrap.dedent(
- '''
+ base = str(apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': textwrap.dedent(
+ '''
formats: []
'''
- )
- }))
+ ),
+ },
+ ))
build = load(base, {})
assert isinstance(build, BuildConfigV1)
assert build.formats == []
@@ -167,7 +180,7 @@ def test_doc_type():
'defaults': {
'doctype': 'sphinx',
},
- }
+ },
)
build.validate()
assert build.doctype == 'sphinx'
@@ -211,7 +224,7 @@ def test_python_pip_install_default():
assert build.python.install_with_pip is False
-class TestValidatePythonExtraRequirements(object):
+class TestValidatePythonExtraRequirements:
def test_it_defaults_to_list(self):
build = get_build_config({'python': {}})
@@ -243,7 +256,7 @@ def test_it_uses_validate_string(self, validate_string):
validate_string.assert_any_call('tests')
-class TestValidateUseSystemSitePackages(object):
+class TestValidateUseSystemSitePackages:
def test_it_defaults_to_false(self):
build = get_build_config({'python': {}})
@@ -269,7 +282,7 @@ def test_it_uses_validate_bool(self, validate_bool):
validate_bool.assert_any_call('to-validate')
-class TestValidateSetupPyInstall(object):
+class TestValidateSetupPyInstall:
def test_it_defaults_to_false(self):
build = get_build_config({'python': {}})
@@ -295,7 +308,7 @@ def test_it_uses_validate_bool(self, validate_bool):
validate_bool.assert_any_call('to-validate')
-class TestValidatePythonVersion(object):
+class TestValidatePythonVersion:
def test_it_defaults_to_a_valid_version(self):
build = get_build_config({'python': {}})
@@ -386,7 +399,7 @@ def test_it_respects_default_value(self, value):
assert build.python.version == value
-class TestValidateFormats(object):
+class TestValidateFormats:
def test_it_defaults_to_empty(self):
build = get_build_config({})
@@ -459,7 +472,7 @@ def test_valid_build_config():
assert build.python.use_system_site_packages is False
-class TestValidateBuild(object):
+class TestValidateBuild:
def test_it_fails_if_build_is_invalid_option(self, tmpdir):
apply_fs(tmpdir, yaml_config_dir)
@@ -523,7 +536,8 @@ def test_default(self, tmpdir):
assert build.build.image == 'readthedocs/build:2.0'
@pytest.mark.parametrize(
- 'image', ['latest', 'readthedocs/build:3.0', 'rtd/build:latest'])
+ 'image', ['latest', 'readthedocs/build:3.0', 'rtd/build:latest'],
+ )
def test_it_priorities_image_from_env_config(self, tmpdir, image):
apply_fs(tmpdir, yaml_config_dir)
defaults = {
@@ -643,9 +657,11 @@ def test_raise_config_not_supported():
assert excinfo.value.code == CONFIG_NOT_SUPPORTED
-@pytest.mark.parametrize('correct_config_filename',
- [prefix + 'readthedocs.' + extension for prefix in {"", "."}
- for extension in {"yml", "yaml"}])
+@pytest.mark.parametrize(
+ 'correct_config_filename',
+ [prefix + 'readthedocs.' + extension for prefix in {'', '.'}
+ for extension in {'yml', 'yaml'}],
+)
def test_config_filenames_regex(correct_config_filename):
assert re.match(CONFIG_FILENAME_REGEX, correct_config_filename)
@@ -704,10 +720,11 @@ def test_as_dict(tmpdir):
assert build.as_dict() == expected_dict
-class TestBuildConfigV2(object):
+class TestBuildConfigV2:
def get_build_config(
- self, config, env_config=None, source_file='readthedocs.yml'):
+ self, config, env_config=None, source_file='readthedocs.yml',
+ ):
return BuildConfigV2(
env_config or {},
config,
@@ -740,7 +757,7 @@ def test_formats_check_invalid_value(self, value):
def test_formats_check_invalid_type(self):
build = self.get_build_config(
- {'formats': ['htmlzip', 'invalid', 'epub']}
+ {'formats': ['htmlzip', 'invalid', 'epub']},
)
with raises(InvalidConfig) as excinfo:
build.validate()
@@ -837,7 +854,8 @@ def test_build_image_check_invalid(self, value):
assert excinfo.value.key == 'build.image'
@pytest.mark.parametrize(
- 'image', ['latest', 'readthedocs/build:3.0', 'rtd/build:latest'])
+ 'image', ['latest', 'readthedocs/build:3.0', 'rtd/build:latest'],
+ )
def test_build_image_priorities_default(self, image):
build = self.get_build_config(
{'build': {'image': 'latest'}},
@@ -881,9 +899,13 @@ def test_python_check_invalid_types(self, value):
build.validate()
assert excinfo.value.key == 'python'
- @pytest.mark.parametrize('image,versions',
- [('latest', [2, 2.7, 3, 3.5, 3.6]),
- ('stable', [2, 2.7, 3, 3.5, 3.6])])
+ @pytest.mark.parametrize(
+ 'image,versions',
+ [
+ ('latest', [2, 2.7, 3, 3.5, 3.6]),
+ ('stable', [2, 2.7, 3, 3.5, 3.6]),
+ ],
+ )
def test_python_version(self, image, versions):
for version in versions:
build = self.get_build_config({
@@ -909,9 +931,13 @@ def test_python_version_accepts_string(self):
build.validate()
assert build.python.version == 3.6
- @pytest.mark.parametrize('image,versions',
- [('latest', [1, 2.8, 4, 3.8]),
- ('stable', [1, 2.8, 4, 3.8])])
+ @pytest.mark.parametrize(
+ 'image,versions',
+ [
+ ('latest', [1, 2.8, 4, 3.8]),
+ ('stable', [1, 2.8, 4, 3.8]),
+ ],
+ )
def test_python_version_invalid(self, image, versions):
for version in versions:
build = self.get_build_config({
@@ -1087,7 +1113,7 @@ def test_python_extra_requirements_and_pip(self):
'python': {
'install': 'pip',
'extra_requirements': ['docs', 'tests'],
- }
+ },
})
build.validate()
assert build.python.extra_requirements == ['docs', 'tests']
@@ -1096,7 +1122,7 @@ def test_python_extra_requirements_not_install(self):
build = self.get_build_config({
'python': {
'extra_requirements': ['docs', 'tests'],
- }
+ },
})
with raises(InvalidConfig) as excinfo:
build.validate()
@@ -1107,7 +1133,7 @@ def test_python_extra_requirements_and_setup(self):
'python': {
'install': 'setup.py',
'extra_requirements': ['docs', 'tests'],
- }
+ },
})
with raises(InvalidConfig) as excinfo:
build.validate()
@@ -1203,10 +1229,14 @@ def test_sphinx_is_default_doc_type(self):
assert build.mkdocs is None
assert build.doctype == 'sphinx'
- @pytest.mark.parametrize('value,expected',
- [('html', 'sphinx'),
- ('htmldir', 'sphinx_htmldir'),
- ('singlehtml', 'sphinx_singlehtml')])
+ @pytest.mark.parametrize(
+ 'value,expected',
+ [
+ ('html', 'sphinx'),
+ ('htmldir', 'sphinx_htmldir'),
+ ('singlehtml', 'sphinx_singlehtml'),
+ ],
+ )
def test_sphinx_builder_check_valid(self, value, expected):
build = self.get_build_config(
{'sphinx': {'builder': value}},
@@ -1229,7 +1259,8 @@ def test_sphinx_builder_default(self):
build.sphinx.builder == 'sphinx'
@pytest.mark.skip(
- 'This test is not compatible with the new validation around doctype.')
+ 'This test is not compatible with the new validation around doctype.',
+ )
def test_sphinx_builder_ignores_default(self):
build = self.get_build_config(
{},
@@ -1461,7 +1492,7 @@ def test_submodules_check_invalid_type(self, value):
def test_submodules_include_check_valid(self):
build = self.get_build_config({
'submodules': {
- 'include': ['one', 'two']
+ 'include': ['one', 'two'],
},
})
build.validate()
@@ -1494,8 +1525,8 @@ def test_submodules_include_allows_all_keyword(self):
def test_submodules_exclude_check_valid(self):
build = self.get_build_config({
'submodules': {
- 'exclude': ['one', 'two']
- }
+ 'exclude': ['one', 'two'],
+ },
})
build.validate()
assert build.submodules.include == []
@@ -1595,26 +1626,28 @@ def test_submodules_recursive_explict_default(self):
assert build.submodules.exclude == []
assert build.submodules.recursive is False
- @pytest.mark.parametrize('value,key', [
- ({'typo': 'something'}, 'typo'),
- (
- {
- 'pyton': {
- 'version': 'another typo',
- }
- },
- 'pyton.version'
- ),
- (
- {
- 'build': {
- 'image': 'latest',
- 'extra': 'key',
- }
- },
- 'build.extra'
- )
- ])
+ @pytest.mark.parametrize(
+ 'value,key', [
+ ({'typo': 'something'}, 'typo'),
+ (
+ {
+ 'pyton': {
+ 'version': 'another typo',
+ },
+ },
+ 'pyton.version',
+ ),
+ (
+ {
+ 'build': {
+ 'image': 'latest',
+ 'extra': 'key',
+ },
+ },
+ 'build.extra',
+ ),
+ ],
+ )
def test_strict_validation(self, value, key):
build = self.get_build_config(value)
with raises(InvalidConfig) as excinfo:
@@ -1632,13 +1665,15 @@ def test_strict_validation_pops_all_keys(self):
build.validate()
assert build.raw_config == {}
- @pytest.mark.parametrize('value,expected', [
- ({}, []),
- ({'one': 1}, ['one']),
- ({'one': {'two': 3}}, ['one', 'two']),
- (OrderedDict([('one', 1), ('two', 2)]), ['one']),
- (OrderedDict([('one', {'two': 2}), ('three', 3)]), ['one', 'two']),
- ])
+ @pytest.mark.parametrize(
+ 'value,expected', [
+ ({}, []),
+ ({'one': 1}, ['one']),
+ ({'one': {'two': 3}}, ['one', 'two']),
+ (OrderedDict([('one', 1), ('two', 2)]), ['one']),
+ (OrderedDict([('one', {'two': 2}), ('three', 3)]), ['one', 'two']),
+ ],
+ )
def test_get_extra_key(self, value, expected):
build = self.get_build_config({})
assert build._get_extra_key(value) == expected
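
Most of the churn in this test module is mechanical: pytest.mark.parametrize calls and nested dict literals re-wrapped with trailing commas. For reference, the shape of such a test, using a stand-in loader rather than the real load():

    import pytest

    def fake_load(base):
        # Stand-in for readthedocs.config.load(); the real tests call the loader.
        raise ValueError('No configuration file found')

    @pytest.mark.parametrize(
        'files', [
            {'readthedocs.ymlmore': ''},
            {'startreadthedocs.yml': ''},
        ],
    )
    def test_load_no_config_file_sketch(tmpdir, files):
        for name, content in files.items():
            tmpdir.join(name).write(content)
        with pytest.raises(ValueError):
            fake_load(str(tmpdir))
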
diff --git a/readthedocs/config/tests/test_find.py b/readthedocs/config/tests/test_find.py
index 7ba651059e3..ab813cdb6a5 100644
--- a/readthedocs/config/tests/test_find.py
+++ b/readthedocs/config/tests/test_find.py
@@ -1,8 +1,6 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
import os
-import pytest
-import six
+
from readthedocs.config.find import find_one
from .utils import apply_fs
@@ -19,16 +17,3 @@ def test_find_at_root(tmpdir):
base = str(tmpdir)
path = find_one(base, r'readthedocs\.yml')
assert path == os.path.abspath(os.path.join(base, 'readthedocs.yml'))
-
-
-@pytest.mark.skipif(not six.PY2, reason='Only for python2')
-def test_find_unicode_path(tmpdir):
- base_path = os.path.abspath(
- os.path.join(os.path.dirname(__file__), 'fixtures/bad_encode_project')
- )
- path = find_one(base_path, r'readthedocs\.yml')
- assert path == ''
- unicode_base_path = base_path.decode('utf-8')
- assert isinstance(unicode_base_path, unicode)
- path = find_one(unicode_base_path, r'readthedocs\.yml')
- assert path == ''
diff --git a/readthedocs/config/tests/test_parser.py b/readthedocs/config/tests/test_parser.py
index 5c37c3c5cb0..64a7e1f7292 100644
--- a/readthedocs/config/tests/test_parser.py
+++ b/readthedocs/config/tests/test_parser.py
@@ -1,5 +1,4 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
from io import StringIO
from pytest import raises
@@ -8,63 +7,64 @@
def test_parse_empty_config_file():
- buf = StringIO(u'')
+ buf = StringIO('')
with raises(ParseError):
parse(buf)
def test_parse_invalid_yaml():
- buf = StringIO(u'- - !asdf')
+ buf = StringIO('- - !asdf')
with raises(ParseError):
parse(buf)
def test_parse_bad_type():
- buf = StringIO(u'Hello')
+ buf = StringIO('Hello')
with raises(ParseError):
parse(buf)
def test_parse_single_config():
- buf = StringIO(u'base: path')
+ buf = StringIO('base: path')
config = parse(buf)
assert isinstance(config, dict)
assert config['base'] == 'path'
def test_parse_null_value():
- buf = StringIO(u'base: null')
+ buf = StringIO('base: null')
config = parse(buf)
assert config['base'] is None
def test_parse_empty_value():
- buf = StringIO(u'base:')
+ buf = StringIO('base:')
config = parse(buf)
assert config['base'] is None
def test_parse_empty_string_value():
- buf = StringIO(u'base: ""')
+ buf = StringIO('base: ""')
config = parse(buf)
assert config['base'] == ''
def test_parse_empty_list():
- buf = StringIO(u'base: []')
+ buf = StringIO('base: []')
config = parse(buf)
assert config['base'] == []
def test_do_not_parse_multiple_configs_in_one_file():
buf = StringIO(
- u'''
+ '''
base: path
---
base: other_path
name: second
nested:
works: true
- ''')
+ '''
+ )
with raises(ParseError):
parse(buf)
diff --git a/readthedocs/config/tests/test_utils.py b/readthedocs/config/tests/test_utils.py
index 3d89cb0d0c1..3d4b57254e3 100644
--- a/readthedocs/config/tests/test_utils.py
+++ b/readthedocs/config/tests/test_utils.py
@@ -1,5 +1,4 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
from .utils import apply_fs
diff --git a/readthedocs/config/tests/test_validation.py b/readthedocs/config/tests/test_validation.py
index 8c2519570d2..8a0e98b98c4 100644
--- a/readthedocs/config/tests/test_validation.py
+++ b/readthedocs/config/tests/test_validation.py
@@ -1,20 +1,29 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
import os
from mock import patch
from pytest import raises
-from six import text_type
from readthedocs.config.validation import (
- INVALID_BOOL, INVALID_CHOICE, INVALID_DIRECTORY, INVALID_FILE, INVALID_LIST,
- INVALID_PATH, INVALID_STRING, ValidationError, validate_bool,
- validate_choice, validate_directory, validate_file, validate_list,
- validate_path, validate_string)
-
-
-class TestValidateBool(object):
+ INVALID_BOOL,
+ INVALID_CHOICE,
+ INVALID_DIRECTORY,
+ INVALID_FILE,
+ INVALID_LIST,
+ INVALID_PATH,
+ INVALID_STRING,
+ ValidationError,
+ validate_bool,
+ validate_choice,
+ validate_directory,
+ validate_file,
+ validate_list,
+ validate_path,
+ validate_string,
+)
+
+
+class TestValidateBool:
def test_it_accepts_true(self):
assert validate_bool(True) is True
@@ -33,7 +42,7 @@ def test_it_fails_on_string(self):
assert excinfo.value.code == INVALID_BOOL
-class TestValidateChoice(object):
+class TestValidateChoice:
def test_it_accepts_valid_choice(self):
result = validate_choice('choice', ('choice', 'another_choice'))
@@ -49,7 +58,7 @@ def test_it_rejects_invalid_choice(self):
assert excinfo.value.code == INVALID_CHOICE
-class TestValidateList(object):
+class TestValidateList:
def test_it_accepts_list_types(self):
result = validate_list(['choice', 'another_choice'])
@@ -70,16 +79,16 @@ def iterator():
def test_it_rejects_string_types(self):
with raises(ValidationError) as excinfo:
- result = validate_list('choice')
+ validate_list('choice')
assert excinfo.value.code == INVALID_LIST
-class TestValidateDirectory(object):
+class TestValidateDirectory:
def test_it_uses_validate_path(self, tmpdir):
patcher = patch('readthedocs.config.validation.validate_path')
with patcher as validate_path:
- path = text_type(tmpdir.mkdir('a directory'))
+ path = str(tmpdir.mkdir('a directory'))
validate_path.return_value = path
validate_directory(path, str(tmpdir))
validate_path.assert_called_with(path, str(tmpdir))
@@ -91,7 +100,7 @@ def test_it_rejects_files(self, tmpdir):
assert excinfo.value.code == INVALID_DIRECTORY
-class TestValidateFile(object):
+class TestValidateFile:
def test_it_uses_validate_path(self, tmpdir):
patcher = patch('readthedocs.config.validation.validate_path')
@@ -110,7 +119,7 @@ def test_it_rejects_directories(self, tmpdir):
assert excinfo.value.code == INVALID_FILE
-class TestValidatePath(object):
+class TestValidatePath:
def test_it_accepts_relative_path(self, tmpdir):
tmpdir.mkdir('a directory')
@@ -140,15 +149,15 @@ def test_it_rejects_non_existent_path(self, tmpdir):
assert excinfo.value.code == INVALID_PATH
-class TestValidateString(object):
+class TestValidateString:
def test_it_accepts_unicode(self):
- result = validate_string(u'Unicöde')
- assert isinstance(result, text_type)
+ result = validate_string('Unicöde')
+ assert isinstance(result, str)
def test_it_accepts_nonunicode(self):
result = validate_string('Unicode')
- assert isinstance(result, text_type)
+ assert isinstance(result, str)
def test_it_rejects_float(self):
with raises(ValidationError) as excinfo:
diff --git a/readthedocs/config/tests/utils.py b/readthedocs/config/tests/utils.py
index b1b312420bb..4dd6a53313c 100644
--- a/readthedocs/config/tests/utils.py
+++ b/readthedocs/config/tests/utils.py
@@ -1,11 +1,11 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
def apply_fs(tmpdir, contents):
"""
- Create the directory structure specified in ``contents``. It's a dict of
- filenames as keys and the file contents as values. If the value is another
- dict, it's a subdirectory.
+ Create the directory structure specified in ``contents``.
+
+ It's a dict of filenames as keys and the file contents as values. If the
+ value is another dict, it's a subdirectory.
"""
for filename, content in contents.items():
if hasattr(content, 'items'):
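
apply_fs() walks the dict recursively: string values become file contents, dict values become subdirectories. The same idea with plain os calls (the real helper works on a py.path tmpdir object rather than a path string):

    import os

    def apply_fs_sketch(base_dir, contents):
        # Strings become file contents; nested dicts become subdirectories.
        for filename, content in contents.items():
            path = os.path.join(base_dir, filename)
            if hasattr(content, 'items'):
                os.makedirs(path, exist_ok=True)
                apply_fs_sketch(path, content)
            else:
                with open(path, 'w') as file_obj:
                    file_obj.write(content)
        return base_dir
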
diff --git a/readthedocs/config/validation.py b/readthedocs/config/validation.py
index ab9164f335e..5d7651dffd8 100644
--- a/readthedocs/config/validation.py
+++ b/readthedocs/config/validation.py
@@ -1,9 +1,8 @@
-"""Validations for the RTD configuration file."""
-from __future__ import division, print_function, unicode_literals
+# -*- coding: utf-8 -*-
+"""Validations for the RTD configuration file."""
import os
-from six import string_types, text_type
INVALID_BOOL = 'invalid-bool'
INVALID_CHOICE = 'invalid-choice'
@@ -29,7 +28,7 @@ class ValidationError(Exception):
INVALID_PATH: 'path {value} does not exist',
INVALID_STRING: 'expected string',
INVALID_LIST: 'expected list',
- VALUE_NOT_FOUND: '{value} not found'
+ VALUE_NOT_FOUND: '{value} not found',
}
def __init__(self, value, code, format_kwargs=None):
@@ -41,12 +40,12 @@ def __init__(self, value, code, format_kwargs=None):
if format_kwargs is not None:
defaults.update(format_kwargs)
message = self.messages[code].format(**defaults)
- super(ValidationError, self).__init__(message)
+ super().__init__(message)
def validate_list(value):
"""Check if ``value`` is an iterable."""
- if isinstance(value, (dict, string_types)):
+ if isinstance(value, (dict, str)):
raise ValidationError(value, INVALID_LIST)
if not hasattr(value, '__iter__'):
raise ValidationError(value, INVALID_LIST)
@@ -63,9 +62,13 @@ def validate_choice(value, choices):
"""Check that ``value`` is in ``choices``."""
choices = validate_list(choices)
if value not in choices:
- raise ValidationError(value, INVALID_CHOICE, {
- 'choices': ', '.join(map(str, choices))
- })
+ raise ValidationError(
+ value,
+ INVALID_CHOICE,
+ {
+ 'choices': ', '.join(map(str, choices)),
+ },
+ )
return value
@@ -113,6 +116,6 @@ def validate_path(value, base_path):
def validate_string(value):
"""Check that ``value`` is a string type."""
- if not isinstance(value, string_types):
+ if not isinstance(value, str):
raise ValidationError(value, INVALID_STRING)
- return text_type(value)
+ return str(value)
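
All the validators here follow one convention: return the normalized value or raise ValidationError with a machine-readable code. A compact illustration using simplified re-implementations rather than the real functions:

    INVALID_BOOL = 'invalid-bool'
    INVALID_CHOICE = 'invalid-choice'

    class ValidationError(Exception):
        def __init__(self, value, code):
            self.code = code
            super().__init__('{}: {!r}'.format(code, value))

    def validate_bool_sketch(value):
        if value not in (True, False, 0, 1):
            raise ValidationError(value, INVALID_BOOL)
        return bool(value)

    def validate_choice_sketch(value, choices):
        if value not in choices:
            raise ValidationError(value, INVALID_CHOICE)
        return value

    assert validate_bool_sketch(True) is True
    assert validate_choice_sketch('choice', ('choice', 'other')) == 'choice'
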
diff --git a/readthedocs/constants.py b/readthedocs/constants.py
index b9796e8f998..579b937715a 100644
--- a/readthedocs/constants.py
+++ b/readthedocs/constants.py
@@ -1,6 +1,7 @@
-"""Common constants"""
+# -*- coding: utf-8 -*-
+
+"""Common constants."""
-from __future__ import absolute_import
from readthedocs.builds.version_slug import VERSION_SLUG_REGEX
from readthedocs.projects.constants import LANGUAGES_REGEX, PROJECT_SLUG_REGEX
diff --git a/readthedocs/core/__init__.py b/readthedocs/core/__init__.py
index ed1c53debf0..21fae505765 100644
--- a/readthedocs/core/__init__.py
+++ b/readthedocs/core/__init__.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""App initialization."""
default_app_config = 'readthedocs.core.apps.CoreAppConfig'
diff --git a/readthedocs/core/adapters.py b/readthedocs/core/adapters.py
index 170f8954e26..c91ad9c3619 100644
--- a/readthedocs/core/adapters.py
+++ b/readthedocs/core/adapters.py
@@ -1,6 +1,7 @@
-"""Allauth overrides"""
+# -*- coding: utf-8 -*-
+
+"""Allauth overrides."""
-from __future__ import absolute_import
import json
import logging
@@ -9,6 +10,7 @@
from readthedocs.core.utils import send_email
+
try:
from django.utils.encoding import force_text
except ImportError:
@@ -19,16 +21,17 @@
class AccountAdapter(DefaultAccountAdapter):
- """Customize Allauth emails to match our current patterns"""
+ """Customize Allauth emails to match our current patterns."""
def format_email_subject(self, subject):
return force_text(subject)
def send_mail(self, template_prefix, email, context):
subject = render_to_string(
- '{0}_subject.txt'.format(template_prefix), context
+ '{}_subject.txt'.format(template_prefix),
+ context,
)
- subject = " ".join(subject.splitlines()).strip()
+ subject = ' '.join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
# Allauth sends some additional data in the context, remove it if the
@@ -41,13 +44,15 @@ def send_mail(self, template_prefix, email, context):
removed_keys.append(key)
del context[key]
if removed_keys:
- log.debug('Removed context we were unable to serialize: %s',
- removed_keys)
+ log.debug(
+ 'Removed context we were unable to serialize: %s',
+ removed_keys,
+ )
send_email(
recipient=email,
subject=subject,
- template='{0}_message.txt'.format(template_prefix),
- template_html='{0}_message.html'.format(template_prefix),
- context=context
+ template='{}_message.txt'.format(template_prefix),
+ template_html='{}_message.html'.format(template_prefix),
+ context=context,
)
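
The send_mail override above drops context entries it cannot serialize before calling send_email. The diff does not show how serializability is checked; one plausible way to express that filtering step on its own is:

    import json

    def drop_unserializable(context):
        # Remove values json.dumps() cannot handle; return the removed keys.
        removed_keys = []
        for key in list(context):
            try:
                json.dumps(context[key])
            except (TypeError, ValueError):
                removed_keys.append(key)
                del context[key]
        return removed_keys
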
diff --git a/readthedocs/core/admin.py b/readthedocs/core/admin.py
index b30f5460484..d0f2051d47c 100644
--- a/readthedocs/core/admin.py
+++ b/readthedocs/core/admin.py
@@ -1,13 +1,14 @@
+# -*- coding: utf-8 -*-
+
"""Django admin interface for core models."""
-from __future__ import absolute_import
from datetime import timedelta
from django.contrib import admin
-from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin
-from django.utils.translation import ugettext_lazy as _
+from django.contrib.auth.models import User
from django.utils import timezone
+from django.utils.translation import ugettext_lazy as _
from readthedocs.core.models import UserProfile
from readthedocs.projects.models import Project
@@ -59,8 +60,14 @@ class UserAdminExtra(UserAdmin):
"""Admin configuration for User."""
- list_display = ('username', 'email', 'first_name',
- 'last_name', 'is_staff', 'is_banned')
+ list_display = (
+ 'username',
+ 'email',
+ 'first_name',
+ 'last_name',
+ 'is_staff',
+ 'is_banned',
+ )
list_filter = (UserProjectFilter,) + UserAdmin.list_filter
actions = ['ban_user']
inlines = [UserProjectInline]
diff --git a/readthedocs/core/apps.py b/readthedocs/core/apps.py
index 4a9875ffb13..ac5a39d63fd 100644
--- a/readthedocs/core/apps.py
+++ b/readthedocs/core/apps.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
+
"""App configurations for core app."""
-from __future__ import absolute_import
from django.apps import AppConfig
diff --git a/readthedocs/core/backends.py b/readthedocs/core/backends.py
index 6a8b8ec9007..b7f9bf788a7 100644
--- a/readthedocs/core/backends.py
+++ b/readthedocs/core/backends.py
@@ -1,10 +1,11 @@
+# -*- coding: utf-8 -*-
+
"""Email backends for core app."""
-from __future__ import absolute_import
import smtplib
-from django.core.mail.utils import DNS_NAME
from django.core.mail.backends.smtp import EmailBackend
+from django.core.mail.utils import DNS_NAME
class SSLEmailBackend(EmailBackend):
@@ -13,8 +14,11 @@ def open(self):
if self.connection:
return False
try:
- self.connection = smtplib.SMTP_SSL(self.host, self.port,
- local_hostname=DNS_NAME.get_fqdn())
+ self.connection = smtplib.SMTP_SSL(
+ self.host,
+ self.port,
+ local_hostname=DNS_NAME.get_fqdn(),
+ )
if self.username and self.password:
self.connection.login(self.username, self.password)
return True
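
SSLEmailBackend swaps smtplib.SMTP for smtplib.SMTP_SSL when opening the connection. A self-contained sketch of that override, mirroring the lines above but without the surrounding try/except (requires Django):

    import smtplib

    from django.core.mail.backends.smtp import EmailBackend
    from django.core.mail.utils import DNS_NAME

    class SSLEmailBackendSketch(EmailBackend):
        def open(self):
            if self.connection:
                return False
            self.connection = smtplib.SMTP_SSL(
                self.host,
                self.port,
                local_hostname=DNS_NAME.get_fqdn(),
            )
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
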
diff --git a/readthedocs/core/context_processors.py b/readthedocs/core/context_processors.py
index 6ac71df2ac7..8bddb9bd10f 100644
--- a/readthedocs/core/context_processors.py
+++ b/readthedocs/core/context_processors.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
+
"""Template context processors for core app."""
-from __future__ import absolute_import
from django.conf import settings
@@ -11,10 +12,17 @@ def readthedocs_processor(request):
'PRODUCTION_DOMAIN': getattr(settings, 'PRODUCTION_DOMAIN', None),
'USE_SUBDOMAINS': getattr(settings, 'USE_SUBDOMAINS', None),
'GLOBAL_ANALYTICS_CODE': getattr(settings, 'GLOBAL_ANALYTICS_CODE'),
- 'DASHBOARD_ANALYTICS_CODE': getattr(settings, 'DASHBOARD_ANALYTICS_CODE'),
+ 'DASHBOARD_ANALYTICS_CODE': getattr(
+ settings,
+ 'DASHBOARD_ANALYTICS_CODE',
+ ),
'SITE_ROOT': getattr(settings, 'SITE_ROOT', '') + '/',
'TEMPLATE_ROOT': getattr(settings, 'TEMPLATE_ROOT', '') + '/',
- 'DO_NOT_TRACK_ENABLED': getattr(settings, 'DO_NOT_TRACK_ENABLED', False),
+ 'DO_NOT_TRACK_ENABLED': getattr(
+ settings,
+ 'DO_NOT_TRACK_ENABLED',
+ False,
+ ),
'USE_PROMOS': getattr(settings, 'USE_PROMOS', False),
}
return exports
diff --git a/readthedocs/core/fields.py b/readthedocs/core/fields.py
index 87f06090870..5801d30146b 100644
--- a/readthedocs/core/fields.py
+++ b/readthedocs/core/fields.py
@@ -1,10 +1,11 @@
-"""Shared model fields and defaults"""
+# -*- coding: utf-8 -*-
+
+"""Shared model fields and defaults."""
-from __future__ import absolute_import
import binascii
import os
def default_token():
- """Generate default value for token field"""
+ """Generate default value for token field."""
return binascii.hexlify(os.urandom(20)).decode()
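
default_token() yields a 40-character hex string (20 random bytes, hex-encoded). A quick check of that, plus a note on how such a callable is typically wired into a model field:

    import binascii
    import os

    def default_token():
        """Generate default value for token field."""
        return binascii.hexlify(os.urandom(20)).decode()

    assert len(default_token()) == 40
    # In a model, pass the callable itself so every row gets a fresh token:
    #     token = models.CharField(max_length=40, default=default_token)
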
diff --git a/readthedocs/core/fixtures/flag_types.json b/readthedocs/core/fixtures/flag_types.json
index afe8ef1cd79..52f13740581 100644
--- a/readthedocs/core/fixtures/flag_types.json
+++ b/readthedocs/core/fixtures/flag_types.json
@@ -1,28 +1,28 @@
[
{
- "pk": 1,
- "model": "flagging.flagtype",
+ "pk": 1,
+ "model": "flagging.flagtype",
"fields": {
- "description": "This item is inappropriate to the purpose of the site",
- "slug": "inappropriate",
+ "description": "This item is inappropriate to the purpose of the site",
+ "slug": "inappropriate",
"title": "Inappropriate"
}
- },
+ },
{
- "pk": 2,
- "model": "flagging.flagtype",
+ "pk": 2,
+ "model": "flagging.flagtype",
"fields": {
- "description": "This item is spam",
- "slug": "spam",
+ "description": "This item is spam",
+ "slug": "spam",
"title": "Spam"
}
- },
+ },
{
- "pk": 3,
- "model": "flagging.flagtype",
+ "pk": 3,
+ "model": "flagging.flagtype",
"fields": {
- "description": "These docs are a duplicate of other, official docs, on the site",
- "slug": "duplicate",
+ "description": "These docs are a duplicate of other, official docs, on the site",
+ "slug": "duplicate",
"title": "Duplicate"
}
}
diff --git a/readthedocs/core/forms.py b/readthedocs/core/forms.py
index bc062286547..34ebfbd0d2e 100644
--- a/readthedocs/core/forms.py
+++ b/readthedocs/core/forms.py
@@ -1,11 +1,8 @@
# -*- coding: utf-8 -*-
-"""Forms for core app."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Forms for core app."""
import logging
-from builtins import object
from django import forms
from django.contrib.auth.models import User
@@ -14,6 +11,7 @@
from .models import UserProfile
+
log = logging.getLogger(__name__)
@@ -21,13 +19,13 @@ class UserProfileForm(forms.ModelForm):
first_name = CharField(label=_('First name'), required=False, max_length=30)
last_name = CharField(label=_('Last name'), required=False, max_length=30)
- class Meta(object):
+ class Meta:
model = UserProfile
# Don't allow users edit someone else's user page
fields = ['first_name', 'last_name', 'homepage']
def __init__(self, *args, **kwargs):
- super(UserProfileForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
try:
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
@@ -37,7 +35,7 @@ def __init__(self, *args, **kwargs):
def save(self, commit=True):
first_name = self.cleaned_data.pop('first_name', None)
last_name = self.cleaned_data.pop('last_name', None)
- profile = super(UserProfileForm, self).save(commit=commit)
+ profile = super().save(commit=commit)
if commit:
user = profile.user
user.first_name = first_name
@@ -52,7 +50,7 @@ class UserDeleteForm(forms.ModelForm):
help_text=_('Please type your username to confirm.'),
)
- class Meta(object):
+ class Meta:
model = User
fields = ['username']
@@ -66,7 +64,8 @@ def clean_username(self):
class UserAdvertisingForm(forms.ModelForm):
- class Meta(object):
+
+ class Meta:
model = UserProfile
fields = ['allow_ads']
diff --git a/readthedocs/core/management/commands/archive.py b/readthedocs/core/management/commands/archive.py
index 33a35bc56c3..037b7319883 100644
--- a/readthedocs/core/management/commands/archive.py
+++ b/readthedocs/core/management/commands/archive.py
@@ -1,15 +1,16 @@
-"""Rebuild documentation for all projects"""
+# -*- coding: utf-8 -*-
+
+"""Rebuild documentation for all projects."""
-from __future__ import absolute_import
-from __future__ import print_function
-from glob import glob
-import os
import logging
+import os
+from glob import glob
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import loader as template_loader
+
log = logging.getLogger(__name__)
@@ -21,10 +22,10 @@ def handle(self, *args, **options):
doc_index = {}
os.chdir(settings.DOCROOT)
- for directory in glob("*"):
+ for directory in glob('*'):
doc_index[directory] = []
path = os.path.join(directory, 'rtd-builds')
- for version in glob(os.path.join(path, "*")):
+ for version in glob(os.path.join(path, '*')):
v = version.replace(path + '/', '')
doc_index[directory].append(v)
@@ -32,5 +33,7 @@ def handle(self, *args, **options):
'doc_index': doc_index,
'MEDIA_URL': settings.MEDIA_URL,
}
- html = template_loader.get_template('archive/index.html').render(context)
+ html = template_loader.get_template(
+ 'archive/index.html',
+ ).render(context)
print(html)
diff --git a/readthedocs/core/management/commands/clean_builds.py b/readthedocs/core/management/commands/clean_builds.py
index e47a651cf8a..d46b8cd1089 100644
--- a/readthedocs/core/management/commands/clean_builds.py
+++ b/readthedocs/core/management/commands/clean_builds.py
@@ -1,9 +1,9 @@
-"""Clean up stable build paths per project version"""
+# -*- coding: utf-8 -*-
+
+"""Clean up stable build paths per project version."""
-from __future__ import absolute_import
-from datetime import timedelta
import logging
-from optparse import make_option
+from datetime import timedelta
from django.core.management.base import BaseCommand
from django.db.models import Max
@@ -11,6 +11,7 @@
from readthedocs.builds.models import Build, Version
+
log = logging.getLogger(__name__)
@@ -24,24 +25,24 @@ def add_arguments(self, parser):
dest='days',
type='int',
default=365,
- help='Find builds older than DAYS days, default: 365'
+ help='Find builds older than DAYS days, default: 365',
)
parser.add_argument(
'--dryrun',
action='store_true',
dest='dryrun',
- help='Perform dry run on build cleanup'
+ help='Perform dry run on build cleanup',
)
def handle(self, *args, **options):
- """Find stale builds and remove build paths"""
+ """Find stale builds and remove build paths."""
max_date = timezone.now() - timedelta(days=options['days'])
- queryset = (Build.objects
- .values('project', 'version')
- .annotate(max_date=Max('date'))
- .filter(max_date__lt=max_date)
- .order_by('-max_date'))
+ queryset = (
+ Build.objects.values('project', 'version').annotate(
+ max_date=Max('date'),
+ ).filter(max_date__lt=max_date).order_by('-max_date')
+ )
for build in queryset:
try:
# Get version from build version id, perform sanity check on
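
The rewritten queryset groups builds by (project, version), annotates each group with its newest build date, and keeps only groups whose newest build predates the cutoff. Written out as a helper (this assumes the project's Django environment, since it imports the real Build model):

    from datetime import timedelta

    from django.db.models import Max
    from django.utils import timezone

    from readthedocs.builds.models import Build

    def stale_build_groups(days=365):
        # One row per (project, version), keeping only groups whose newest
        # build is older than the cutoff.
        max_date = timezone.now() - timedelta(days=days)
        return (
            Build.objects.values('project', 'version')
            .annotate(max_date=Max('date'))
            .filter(max_date__lt=max_date)
            .order_by('-max_date')
        )
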
diff --git a/readthedocs/core/management/commands/import_github.py b/readthedocs/core/management/commands/import_github.py
index 3bb34b1b4ad..90ce69e9e6a 100644
--- a/readthedocs/core/management/commands/import_github.py
+++ b/readthedocs/core/management/commands/import_github.py
@@ -1,8 +1,9 @@
-"""Resync GitHub project for user"""
+# -*- coding: utf-8 -*-
+
+"""Resync GitHub project for user."""
-from __future__ import absolute_import
-from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
+from django.core.management.base import BaseCommand
from readthedocs.oauth.services import GitHubService
@@ -15,6 +16,8 @@ def handle(self, *args, **options):
if args:
for slug in args:
for service in GitHubService.for_user(
- User.objects.get(username=slug)
+ User.objects.get(
+ username=slug,
+ ),
):
service.sync()
diff --git a/readthedocs/core/management/commands/import_github_language.py b/readthedocs/core/management/commands/import_github_language.py
index ef2945ea557..bf99ac265c0 100644
--- a/readthedocs/core/management/commands/import_github_language.py
+++ b/readthedocs/core/management/commands/import_github_language.py
@@ -1,5 +1,7 @@
+# -*- coding: utf-8 -*-
+
"""
-Import a project's programming language from GitHub
+Import a project's programming language from GitHub.
This builds a basic management command that will set
a project's language to the most used one on GitHub.
@@ -8,16 +10,15 @@
which should contain a proper GitHub Oauth Token for rate limiting.
"""
-from __future__ import absolute_import
-from __future__ import print_function
import os
-import requests
-from django.core.management.base import BaseCommand
+import requests
from django.core.cache import cache
+from django.core.management.base import BaseCommand
-from readthedocs.projects.models import Project
from readthedocs.projects.constants import GITHUB_REGEXS, PROGRAMMING_LANGUAGES
+from readthedocs.projects.models import Project
+
PL_DICT = {}
@@ -36,11 +37,7 @@ def handle(self, *args, **options):
print('Invalid GitHub token, exiting')
return
- for project in Project.objects.filter(
- programming_language__in=['none', '', 'words']
- ).filter(
- repo__contains='github'
- ):
+ for project in Project.objects.filter(programming_language__in=['none', '', 'words']).filter(repo__contains='github'): # noqa
user = repo = ''
repo_url = project.repo
for regex in GITHUB_REGEXS:
@@ -53,7 +50,7 @@ def handle(self, *args, **options):
print('No GitHub repo for %s' % repo_url)
continue
- cache_key = '%s-%s' % (user, repo)
+ cache_key = '{}-{}'.format(user, repo)
top_lang = cache.get(cache_key, None)
if not top_lang:
url = 'https://api.github.com/repos/{user}/{repo}/languages'.format(
@@ -66,15 +63,21 @@ def handle(self, *args, **options):
languages = resp.json()
if not languages:
continue
- sorted_langs = sorted(list(languages.items()), key=lambda x: x[1], reverse=True)
+ sorted_langs = sorted(
+ list(languages.items()),
+ key=lambda x: x[1],
+ reverse=True,
+ )
print('Sorted langs: %s ' % sorted_langs)
top_lang = sorted_langs[0][0]
else:
print('Cached top_lang: %s' % top_lang)
if top_lang in PL_DICT:
slug = PL_DICT[top_lang]
- print('Setting %s to %s' % (repo_url, slug))
- Project.objects.filter(pk=project.pk).update(programming_language=slug)
+ print('Setting {} to {}'.format(repo_url, slug))
+ Project.objects.filter(
+ pk=project.pk,
+ ).update(programming_language=slug)
else:
print('Language unknown: %s' % top_lang)
cache.set(cache_key, top_lang, 60 * 600)
diff --git a/readthedocs/core/management/commands/provision_elasticsearch.py b/readthedocs/core/management/commands/provision_elasticsearch.py
index 9f29fa37a9e..b1efe643c61 100644
--- a/readthedocs/core/management/commands/provision_elasticsearch.py
+++ b/readthedocs/core/management/commands/provision_elasticsearch.py
@@ -1,11 +1,18 @@
-"""Provision Elastic Search"""
+# -*- coding: utf-8 -*-
+
+"""Provision Elastic Search."""
-from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
-from readthedocs.search.indexes import Index, PageIndex, ProjectIndex, SectionIndex
+from readthedocs.search.indexes import (
+ Index,
+ PageIndex,
+ ProjectIndex,
+ SectionIndex,
+)
+
log = logging.getLogger(__name__)
@@ -15,19 +22,19 @@ class Command(BaseCommand):
help = __doc__
def handle(self, *args, **options):
- """Provision new ES instance"""
+ """Provision new ES instance."""
index = Index()
index_name = index.timestamped_index()
- log.info("Creating indexes..")
+ log.info('Creating indexes..')
index.create_index(index_name)
index.update_aliases(index_name)
- log.info("Updating mappings..")
+ log.info('Updating mappings..')
proj = ProjectIndex()
proj.put_mapping()
page = PageIndex()
page.put_mapping()
sec = SectionIndex()
sec.put_mapping()
- log.info("Done!")
+ log.info('Done!')
diff --git a/readthedocs/core/management/commands/pull.py b/readthedocs/core/management/commands/pull.py
index 3540a35c077..ae62f9da10c 100644
--- a/readthedocs/core/management/commands/pull.py
+++ b/readthedocs/core/management/commands/pull.py
@@ -1,6 +1,7 @@
-"""Trigger build for project slug"""
+# -*- coding: utf-8 -*-
+
+"""Trigger build for project slug."""
-from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
diff --git a/readthedocs/core/management/commands/reindex_elasticsearch.py b/readthedocs/core/management/commands/reindex_elasticsearch.py
index 7a5f25a065a..bcc199a26c1 100644
--- a/readthedocs/core/management/commands/reindex_elasticsearch.py
+++ b/readthedocs/core/management/commands/reindex_elasticsearch.py
@@ -1,17 +1,17 @@
-"""Reindex Elastic Search indexes"""
+# -*- coding: utf-8 -*-
+
+"""Reindex Elastic Search indexes."""
-from __future__ import absolute_import
import logging
-from optparse import make_option
-from django.core.management.base import BaseCommand
-from django.core.management.base import CommandError
from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.projects.tasks import update_search
+
log = logging.getLogger(__name__)
@@ -24,11 +24,11 @@ def add_arguments(self, parser):
'-p',
dest='project',
default='',
- help='Project to index'
+ help='Project to index',
)
def handle(self, *args, **options):
- """Build/index all versions or a single project's version"""
+ """Build/index all versions or a single project's version."""
project = options['project']
queryset = Version.objects.all()
@@ -37,13 +37,14 @@ def handle(self, *args, **options):
queryset = queryset.filter(project__slug=project)
if not queryset.exists():
raise CommandError(
- 'No project with slug: {slug}'.format(slug=project))
- log.info("Building all versions for %s", project)
+ 'No project with slug: {slug}'.format(slug=project),
+ )
+ log.info('Building all versions for %s', project)
elif getattr(settings, 'INDEX_ONLY_LATEST', True):
queryset = queryset.filter(slug=LATEST)
for version in queryset:
- log.info("Reindexing %s", version)
+ log.info('Reindexing %s', version)
try:
commit = version.project.vcs_repo(version.slug).commit
except: # noqa
@@ -52,7 +53,10 @@ def handle(self, *args, **options):
commit = None
try:
- update_search(version.pk, commit,
- delete_non_commit_files=False)
+ update_search(
+ version.pk,
+ commit,
+ delete_non_commit_files=False,
+ )
except Exception as e:
log.exception('Reindex failed for %s, %s', version, e)
diff --git a/readthedocs/core/management/commands/set_metadata.py b/readthedocs/core/management/commands/set_metadata.py
index dbefcddbdd0..62384fe0e0e 100644
--- a/readthedocs/core/management/commands/set_metadata.py
+++ b/readthedocs/core/management/commands/set_metadata.py
@@ -1,13 +1,15 @@
-"""Generate metadata for all projects"""
+# -*- coding: utf-8 -*-
+
+"""Generate metadata for all projects."""
-from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
+from readthedocs.core.utils import broadcast
from readthedocs.projects import tasks
from readthedocs.projects.models import Project
-from readthedocs.core.utils import broadcast
+
log = logging.getLogger(__name__)
@@ -19,8 +21,12 @@ class Command(BaseCommand):
def handle(self, *args, **options):
queryset = Project.objects.all()
for p in queryset:
- log.info("Generating metadata for %s", p)
+ log.info('Generating metadata for %s', p)
try:
- broadcast(type='app', task=tasks.update_static_metadata, args=[p.pk])
+ broadcast(
+ type='app',
+ task=tasks.update_static_metadata,
+ args=[p.pk],
+ )
except Exception:
log.exception('Build failed for %s', p)
diff --git a/readthedocs/core/management/commands/symlink.py b/readthedocs/core/management/commands/symlink.py
index e5c039c622b..2d843b9baab 100644
--- a/readthedocs/core/management/commands/symlink.py
+++ b/readthedocs/core/management/commands/symlink.py
@@ -1,14 +1,15 @@
-"""Update symlinks for projects"""
+# -*- coding: utf-8 -*-
+
+"""Update symlinks for projects."""
-from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
from readthedocs.projects import tasks
-
from readthedocs.projects.models import Project
+
log = logging.getLogger(__name__)
@@ -24,7 +25,9 @@ def handle(self, *args, **options):
if 'all' in projects:
pks = Project.objects.values_list('pk', flat=True)
else:
- pks = Project.objects.filter(slug__in=projects).values_list('pk', flat=True)
+ pks = Project.objects.filter(
+ slug__in=projects,
+ ).values_list('pk', flat=True)
for proj in pks:
try:
tasks.symlink_project(project_pk=proj)
diff --git a/readthedocs/core/management/commands/update_api.py b/readthedocs/core/management/commands/update_api.py
index e95b23b1899..68b695c4d52 100644
--- a/readthedocs/core/management/commands/update_api.py
+++ b/readthedocs/core/management/commands/update_api.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Build documentation using the API and not hitting a database.
@@ -6,7 +8,6 @@
./manage.py update_api
"""
-from __future__ import absolute_import
import logging
from django.core.management.base import BaseCommand
@@ -32,6 +33,6 @@ def handle(self, *args, **options):
for slug in options['projects']:
project_data = api.project(slug).get()
p = APIProject(**project_data)
- log.info("Building %s", p)
+ log.info('Building %s', p)
# pylint: disable=no-value-for-parameter
tasks.update_docs_task(p.pk, docker=docker)
diff --git a/readthedocs/core/management/commands/update_repos.py b/readthedocs/core/management/commands/update_repos.py
index b5233d2d6df..17d0951909e 100644
--- a/readthedocs/core/management/commands/update_repos.py
+++ b/readthedocs/core/management/commands/update_repos.py
@@ -6,9 +6,6 @@
Invoked via ``./manage.py update_repos``.
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import logging
from django.core.management.base import BaseCommand
@@ -18,6 +15,7 @@
from readthedocs.projects import tasks
from readthedocs.projects.models import Project
+
log = logging.getLogger(__name__)
diff --git a/readthedocs/core/management/commands/update_versions.py b/readthedocs/core/management/commands/update_versions.py
index 8961d6706cf..aacb47635a1 100644
--- a/readthedocs/core/management/commands/update_versions.py
+++ b/readthedocs/core/management/commands/update_versions.py
@@ -1,6 +1,7 @@
-"""Rebuild documentation for all projects"""
+# -*- coding: utf-8 -*-
+
+"""Rebuild documentation for all projects."""
-from __future__ import absolute_import
from django.core.management.base import BaseCommand
from readthedocs.builds.models import Version
@@ -17,5 +18,5 @@ def handle(self, *args, **options):
update_docs_task(
version.project_id,
record=False,
- version_pk=version.pk
+ version_pk=version.pk,
)
diff --git a/readthedocs/core/middleware.py b/readthedocs/core/middleware.py
index 262fcb064e0..6fc5bbe55c8 100644
--- a/readthedocs/core/middleware.py
+++ b/readthedocs/core/middleware.py
@@ -1,7 +1,6 @@
-"""Middleware for core app."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Middleware for core app."""
import logging
@@ -9,26 +8,27 @@
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.cache import cache
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
-from django.urls.base import get_urlconf, set_urlconf
from django.http import Http404, HttpResponseBadRequest
+from django.urls.base import set_urlconf
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext_lazy as _
from readthedocs.core.utils import cname_to_slug
from readthedocs.projects.models import Domain, Project
+
log = logging.getLogger(__name__)
-LOG_TEMPLATE = u"(Middleware) {msg} [{host}{path}]"
+LOG_TEMPLATE = '(Middleware) {msg} [{host}{path}]'
SUBDOMAIN_URLCONF = getattr(
settings,
'SUBDOMAIN_URLCONF',
- 'readthedocs.core.urls.subdomain'
+ 'readthedocs.core.urls.subdomain',
)
SINGLE_VERSION_URLCONF = getattr(
settings,
'SINGLE_VERSION_URLCONF',
- 'readthedocs.core.urls.single_version'
+ 'readthedocs.core.urls.single_version',
)
@@ -54,7 +54,7 @@ def process_request(self, request):
production_domain = getattr(
settings,
'PRODUCTION_DOMAIN',
- 'readthedocs.org'
+ 'readthedocs.org',
)
if public_domain is None:
@@ -67,9 +67,8 @@ def process_request(self, request):
if len(domain_parts) == len(public_domain.split('.')) + 1:
subdomain = domain_parts[0]
is_www = subdomain.lower() == 'www'
- if not is_www and (
- # Support ports during local dev
- public_domain in host or public_domain in full_host
+ if not is_www and ( # Support ports during local dev
+ public_domain in host or public_domain in full_host
):
if not Project.objects.filter(slug=subdomain).exists():
raise Http404(_('Project not found'))
@@ -79,10 +78,10 @@ def process_request(self, request):
return None
# Serve CNAMEs
- if (public_domain not in host and
- production_domain not in host and
- 'localhost' not in host and
- 'testserver' not in host):
+ if (
+ public_domain not in host and production_domain not in host and
+ 'localhost' not in host and 'testserver' not in host
+ ):
request.cname = True
domains = Domain.objects.filter(domain=host)
if domains.count():
@@ -91,18 +90,26 @@ def process_request(self, request):
request.slug = domain.project.slug
request.urlconf = SUBDOMAIN_URLCONF
request.domain_object = True
- log.debug(LOG_TEMPLATE.format(
- msg='Domain Object Detected: %s' % domain.domain,
- **log_kwargs))
+ log.debug(
+ LOG_TEMPLATE.format(
+ msg='Domain Object Detected: %s' % domain.domain,
+ **log_kwargs
+ ),
+ )
break
- if (not hasattr(request, 'domain_object') and
- 'HTTP_X_RTD_SLUG' in request.META):
+ if (
+ not hasattr(request, 'domain_object') and
+ 'HTTP_X_RTD_SLUG' in request.META
+ ):
request.slug = request.META['HTTP_X_RTD_SLUG'].lower()
request.urlconf = SUBDOMAIN_URLCONF
request.rtdheader = True
- log.debug(LOG_TEMPLATE.format(
- msg='X-RTD-Slug header detected: %s' % request.slug,
- **log_kwargs))
+ log.debug(
+ LOG_TEMPLATE.format(
+ msg='X-RTD-Slug header detected: %s' % request.slug,
+ **log_kwargs
+ ),
+ )
# Try header first, then DNS
elif not hasattr(request, 'domain_object'):
try:
@@ -111,26 +118,39 @@ def process_request(self, request):
slug = cname_to_slug(host)
cache.set(host, slug, 60 * 60)
# Cache the slug -> host mapping permanently.
- log.info(LOG_TEMPLATE.format(
- msg='CNAME cached: %s->%s' % (slug, host),
- **log_kwargs))
+ log.info(
+ LOG_TEMPLATE.format(
+ msg='CNAME cached: {}->{}'.format(slug, host),
+ **log_kwargs
+ ),
+ )
request.slug = slug
request.urlconf = SUBDOMAIN_URLCONF
- log.warning(LOG_TEMPLATE.format(
- msg='CNAME detected: %s' % request.slug,
- **log_kwargs))
+ log.warning(
+ LOG_TEMPLATE.format(
+ msg='CNAME detected: %s' % request.slug,
+ **log_kwargs
+ ),
+ )
except: # noqa
# Some crazy person is CNAMEing to us. 404.
- log.warning(LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs))
+ log.warning(
+ LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs),
+ )
raise Http404(_('Invalid hostname'))
# Google was finding crazy www.blah.readthedocs.org domains.
# Block these explicitly after trying CNAME logic.
if len(domain_parts) > 3 and not settings.DEBUG:
# Stop www.fooo.readthedocs.org
if domain_parts[0] == 'www':
- log.debug(LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs))
+ log.debug(
+ LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs),
+ )
return HttpResponseBadRequest(_('Invalid hostname'))
- log.debug(LOG_TEMPLATE.format(msg='Allowing long domain name', **log_kwargs))
+ log.debug(
+ LOG_TEMPLATE
+ .format(msg='Allowing long domain name', **log_kwargs),
+ )
# raise Http404(_('Invalid hostname'))
# Normal request.
return None
@@ -188,8 +208,9 @@ def process_request(self, request):
host = request.get_host()
path = request.get_full_path()
log_kwargs = dict(host=host, path=path)
- log.debug(LOG_TEMPLATE.format(
- msg='Handling single_version request', **log_kwargs)
+ log.debug(
+ LOG_TEMPLATE.
+ format(msg='Handling single_version request', **log_kwargs),
)
return None
@@ -219,7 +240,7 @@ def process_request(self, request):
else:
# HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs. The
# client's IP will be the first one.
- real_ip = real_ip.split(",")[0].strip()
+ real_ip = real_ip.split(',')[0].strip()
request.META['REMOTE_ADDR'] = real_ip
@@ -231,20 +252,26 @@ class FooterNoSessionMiddleware(SessionMiddleware):
This will reduce the size of our session table drastically.
"""
- IGNORE_URLS = ['/api/v2/footer_html', '/sustainability/view', '/sustainability/click']
+ IGNORE_URLS = [
+ '/api/v2/footer_html', '/sustainability/view', '/sustainability/click',
+ ]
def process_request(self, request):
for url in self.IGNORE_URLS:
- if (request.path_info.startswith(url) and
- settings.SESSION_COOKIE_NAME not in request.COOKIES):
+ if (
+ request.path_info.startswith(url) and
+ settings.SESSION_COOKIE_NAME not in request.COOKIES
+ ):
# Hack request.session otherwise the Authentication middleware complains.
request.session = {}
return
- super(FooterNoSessionMiddleware, self).process_request(request)
+ super().process_request(request)
def process_response(self, request, response):
for url in self.IGNORE_URLS:
- if (request.path_info.startswith(url) and
- settings.SESSION_COOKIE_NAME not in request.COOKIES):
+ if (
+ request.path_info.startswith(url) and
+ settings.SESSION_COOKIE_NAME not in request.COOKIES
+ ):
return response
- return super(FooterNoSessionMiddleware, self).process_response(request, response)
+ return super().process_response(request, response)
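Note on the FooterNoSessionMiddleware hunk above: this change set consistently collapses super(Class, self) calls to the zero-argument Python 3 form. A minimal, framework-free sketch of the same skip-or-delegate pattern (the class and attribute names below are illustrative, not the Read the Docs middleware):

    # Illustrative sketch of the zero-argument super() idiom used throughout this diff.
    class BaseMiddleware:
        def process_request(self, request):
            return 'handled {}'.format(request)

    class FooterNoSessionLike(BaseMiddleware):
        IGNORE_PREFIXES = ('/api/v2/footer_html',)

        def process_request(self, request):
            # Skip the parent's work for ignored paths, otherwise delegate.
            if any(request.startswith(prefix) for prefix in self.IGNORE_PREFIXES):
                return None
            return super().process_request(request)  # Python 3: no class/self arguments

    assert FooterNoSessionLike().process_request('/api/v2/footer_html') is None
    assert FooterNoSessionLike().process_request('/dashboard/') == 'handled /dashboard/'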
diff --git a/readthedocs/core/migrations/0001_initial.py b/readthedocs/core/migrations/0001_initial.py
index e3d5b948469..bb5bde04285 100644
--- a/readthedocs/core/migrations/0001_initial.py
+++ b/readthedocs/core/migrations/0001_initial.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/core/migrations/0002_make_userprofile_user_a_onetoonefield.py b/readthedocs/core/migrations/0002_make_userprofile_user_a_onetoonefield.py
index f5e6255cc6b..5f7d04ff910 100644
--- a/readthedocs/core/migrations/0002_make_userprofile_user_a_onetoonefield.py
+++ b/readthedocs/core/migrations/0002_make_userprofile_user_a_onetoonefield.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/core/migrations/0003_add_banned_status.py b/readthedocs/core/migrations/0003_add_banned_status.py
index f3dfbb8b777..95d26eefc42 100644
--- a/readthedocs/core/migrations/0003_add_banned_status.py
+++ b/readthedocs/core/migrations/0003_add_banned_status.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/core/migrations/0004_ad-opt-out.py b/readthedocs/core/migrations/0004_ad-opt-out.py
index 9e8c5bf3209..f5d6ae3d029 100644
--- a/readthedocs/core/migrations/0004_ad-opt-out.py
+++ b/readthedocs/core/migrations/0004_ad-opt-out.py
@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-06-14 18:06
-from __future__ import unicode_literals
-
import annoying.fields
+import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
-import django.db.models.deletion
class Migration(migrations.Migration):
diff --git a/readthedocs/core/migrations/0005_migrate-old-passwords.py b/readthedocs/core/migrations/0005_migrate-old-passwords.py
index 2ef614d0db6..8a44107c90b 100644
--- a/readthedocs/core/migrations/0005_migrate-old-passwords.py
+++ b/readthedocs/core/migrations/0005_migrate-old-passwords.py
@@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
-from __future__ import unicode_literals
-
-from django.db import migrations
from django.contrib.auth.hashers import make_password
+from django.db import migrations
def forwards_func(apps, schema_editor):
diff --git a/readthedocs/core/mixins.py b/readthedocs/core/mixins.py
index 7655db20bd4..4d6b1160c1c 100644
--- a/readthedocs/core/mixins.py
+++ b/readthedocs/core/mixins.py
@@ -1,24 +1,24 @@
-"""Common mixin classes for views"""
+# -*- coding: utf-8 -*-
+
+"""Common mixin classes for views."""
-from __future__ import absolute_import
-from builtins import object
-from vanilla import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
+from vanilla import ListView
class ListViewWithForm(ListView):
- """List view that also exposes a create form"""
+ """List view that also exposes a create form."""
def get_context_data(self, **kwargs):
- context = super(ListViewWithForm, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['form'] = self.get_form(data=None, files=None)
return context
-class LoginRequiredMixin(object):
+class LoginRequiredMixin:
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
- return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
+ return super().dispatch(*args, **kwargs)
diff --git a/readthedocs/core/models.py b/readthedocs/core/models.py
index 8b72a79199f..bee057bdb6e 100644
--- a/readthedocs/core/models.py
+++ b/readthedocs/core/models.py
@@ -1,16 +1,15 @@
# -*- coding: utf-8 -*-
-"""Models for the core app."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Models for the core app."""
import logging
from annoying.fields import AutoOneToOneField
from django.db import models
+from django.urls import reverse
from django.utils.encoding import python_2_unicode_compatible
-from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
-from django.urls import reverse
+from django.utils.translation import ugettext_lazy as _
+
STANDARD_EMAIL = 'anonymous@readthedocs.org'
@@ -23,7 +22,10 @@ class UserProfile(models.Model):
"""Additional information about a User."""
user = AutoOneToOneField(
- 'auth.User', verbose_name=_('User'), related_name='profile')
+ 'auth.User',
+ verbose_name=_('User'),
+ related_name='profile',
+ )
whitelisted = models.BooleanField(_('Whitelisted'), default=False)
banned = models.BooleanField(_('Banned'), default=False)
homepage = models.CharField(_('Homepage'), max_length=100, blank=True)
@@ -41,10 +43,14 @@ class UserProfile(models.Model):
def __str__(self):
return (
ugettext("%(username)s's profile") %
- {'username': self.user.username})
+ {'username': self.user.username}
+ )
def get_absolute_url(self):
- return reverse('profiles_profile_detail', kwargs={'username': self.user.username})
+ return reverse(
+ 'profiles_profile_detail',
+ kwargs={'username': self.user.username},
+ )
def get_contribution_details(self):
"""
diff --git a/readthedocs/core/permissions.py b/readthedocs/core/permissions.py
index c8a7fe6821b..1c397eae11c 100644
--- a/readthedocs/core/permissions.py
+++ b/readthedocs/core/permissions.py
@@ -1,11 +1,11 @@
-"""Objects for User permission checks"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Objects for User permission checks."""
from readthedocs.core.utils.extend import SettingsOverrideObject
-class AdminPermissionBase(object):
+class AdminPermissionBase:
@classmethod
def is_admin(cls, user, project):
diff --git a/readthedocs/core/resolver.py b/readthedocs/core/resolver.py
index 43fce68b3f6..7758226cc2f 100644
--- a/readthedocs/core/resolver.py
+++ b/readthedocs/core/resolver.py
@@ -1,16 +1,16 @@
+# -*- coding: utf-8 -*-
+
"""URL resolver for documentation."""
-from __future__ import absolute_import
-from builtins import object
import re
from django.conf import settings
-from readthedocs.projects.constants import PRIVATE, PUBLIC
from readthedocs.core.utils.extend import SettingsOverrideObject
+from readthedocs.projects.constants import PRIVATE, PUBLIC
-class ResolverBase(object):
+class ResolverBase:
"""
Read the Docs URL Resolver.
@@ -51,35 +51,55 @@ class ResolverBase(object):
/docs//projects//
"""
- def base_resolve_path(self, project_slug, filename, version_slug=None,
- language=None, private=False, single_version=None,
- subproject_slug=None, subdomain=None, cname=None):
+ def base_resolve_path(
+ self,
+ project_slug,
+ filename,
+ version_slug=None,
+ language=None,
+ private=False,
+ single_version=None,
+ subproject_slug=None,
+ subdomain=None,
+ cname=None,
+ ):
"""Resolve a with nothing smart, just filling in the blanks."""
# Only support `/docs/project' URLs outside our normal environment. Normally
# the path should always have a subdomain or CNAME domain
# pylint: disable=unused-argument
if subdomain or cname or (self._use_subdomain()):
- url = u'/'
+ url = '/'
else:
- url = u'/docs/{project_slug}/'
+ url = '/docs/{project_slug}/'
if subproject_slug:
- url += u'projects/{subproject_slug}/'
+ url += 'projects/{subproject_slug}/'
if single_version:
- url += u'{filename}'
+ url += '{filename}'
else:
- url += u'{language}/{version_slug}/{filename}'
+ url += '{language}/{version_slug}/{filename}'
return url.format(
- project_slug=project_slug, filename=filename,
- version_slug=version_slug, language=language,
- single_version=single_version, subproject_slug=subproject_slug,
+ project_slug=project_slug,
+ filename=filename,
+ version_slug=version_slug,
+ language=language,
+ single_version=single_version,
+ subproject_slug=subproject_slug,
)
- def resolve_path(self, project, filename='', version_slug=None,
- language=None, single_version=None, subdomain=None,
- cname=None, private=None):
+ def resolve_path(
+ self,
+ project,
+ filename='',
+ version_slug=None,
+ language=None,
+ single_version=None,
+ subdomain=None,
+ cname=None,
+ private=None,
+ ):
"""Resolve a URL with a subset of fields defined."""
cname = cname or project.domains.filter(canonical=True).first()
version_slug = version_slug or project.get_default_version()
@@ -138,8 +158,10 @@ def resolve_domain(self, project, private=None):
return getattr(settings, 'PRODUCTION_DOMAIN')
- def resolve(self, project, require_https=False, filename='', private=None,
- **kwargs):
+ def resolve(
+ self, project, require_https=False, filename='', private=None,
+ **kwargs
+ ):
if private is None:
version_slug = kwargs.get('version_slug')
if version_slug is None:
@@ -173,8 +195,8 @@ def resolve(self, project, require_https=False, filename='', private=None,
return '{protocol}://{domain}{path}'.format(
protocol=protocol,
domain=domain,
- path=self.resolve_path(project, filename=filename, private=private,
- **kwargs),
+ path=self.
+ resolve_path(project, filename=filename, private=private, **kwargs),
)
def _get_canonical_project(self, project, projects=None):
@@ -212,7 +234,7 @@ def _get_project_subdomain(self, project):
if self._use_subdomain():
project = self._get_canonical_project(project)
subdomain_slug = project.slug.replace('_', '-')
- return "%s.%s" % (subdomain_slug, public_domain)
+ return '{}.{}'.format(subdomain_slug, public_domain)
def _get_project_custom_domain(self, project):
return project.domains.filter(canonical=True).first()
@@ -223,7 +245,11 @@ def _get_private(self, project, version_slug):
version = project.versions.get(slug=version_slug)
private = version.privacy_level == PRIVATE
except Version.DoesNotExist:
- private = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', PUBLIC) == PRIVATE
+ private = getattr(
+ settings,
+ 'DEFAULT_PRIVACY_LEVEL',
+ PUBLIC,
+ ) == PRIVATE
return private
def _fix_filename(self, project, filename):
@@ -241,17 +267,17 @@ def _fix_filename(self, project, filename):
if filename:
if filename.endswith('/') or filename.endswith('.html'):
path = filename
- elif project.documentation_type == "sphinx_singlehtml":
- path = "index.html#document-" + filename
- elif project.documentation_type in ["sphinx_htmldir", "mkdocs"]:
- path = filename + "/"
+ elif project.documentation_type == 'sphinx_singlehtml':
+ path = 'index.html#document-' + filename
+ elif project.documentation_type in ['sphinx_htmldir', 'mkdocs']:
+ path = filename + '/'
elif '#' in filename:
# do nothing if the filename contains URL fragments
path = filename
else:
- path = filename + ".html"
+ path = filename + '.html'
else:
- path = ""
+ path = ''
return path
def _use_custom_domain(self, custom_domain):
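The _fix_filename hunk above only swaps quote styles, but the mapping it implements is easy to miss in diff form: the documentation_type decides how a bare filename becomes a servable path. A standalone sketch of that mapping (the function name and the checks at the end are illustrative assumptions, not the resolver API):

    def fix_filename(documentation_type, filename):
        # Mirrors the branches shown in the hunk: explicit paths pass through,
        # otherwise the builder type decides the suffix.
        if not filename:
            return ''
        if filename.endswith('/') or filename.endswith('.html'):
            return filename
        if documentation_type == 'sphinx_singlehtml':
            return 'index.html#document-' + filename
        if documentation_type in ('sphinx_htmldir', 'mkdocs'):
            return filename + '/'
        if '#' in filename:
            return filename
        return filename + '.html'

    assert fix_filename('mkdocs', 'install') == 'install/'
    assert fix_filename('sphinx', 'install') == 'install.html'
    assert fix_filename('sphinx_singlehtml', 'install') == 'index.html#document-install'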
diff --git a/readthedocs/core/settings.py b/readthedocs/core/settings.py
index d66c6d02d28..5967893cc0e 100644
--- a/readthedocs/core/settings.py
+++ b/readthedocs/core/settings.py
@@ -1,12 +1,10 @@
"""Class based settings for complex settings inheritance."""
-from __future__ import absolute_import
-from builtins import object
import inspect
import sys
-class Settings(object):
+class Settings:
"""Class-based settings wrapper."""
diff --git a/readthedocs/core/signals.py b/readthedocs/core/signals.py
index d8b90b57d82..a4f029c6b3e 100644
--- a/readthedocs/core/signals.py
+++ b/readthedocs/core/signals.py
@@ -2,21 +2,19 @@
"""Signal handling for core app."""
-from __future__ import absolute_import
-
import logging
+from urllib.parse import urlparse
from corsheaders import signals
from django.conf import settings
+from django.db.models import Count, Q
from django.db.models.signals import pre_delete
-from django.dispatch import Signal
-from django.db.models import Q, Count
-from django.dispatch import receiver
-from future.backports.urllib.parse import urlparse
+from django.dispatch import Signal, receiver
from rest_framework.permissions import SAFE_METHODS
from readthedocs.oauth.models import RemoteOrganization
-from readthedocs.projects.models import Project, Domain
+from readthedocs.projects.models import Domain, Project
+
log = logging.getLogger(__name__)
@@ -92,15 +90,17 @@ def delete_projects_and_organizations(sender, instance, *args, **kwargs):
# https://github.com/rtfd/readthedocs.org/pull/4577
# https://docs.djangoproject.com/en/2.1/topics/db/aggregation/#order-of-annotate-and-filter-clauses # noqa
projects = (
- Project.objects.annotate(num_users=Count('users'))
- .filter(users=instance.id).exclude(num_users__gt=1)
+ Project.objects.annotate(num_users=Count('users')
+ ).filter(users=instance.id
+ ).exclude(num_users__gt=1)
)
# Here we count the users list from the organization that the user belong
# Then exclude the organizations where there are more than one user
oauth_organizations = (
- RemoteOrganization.objects.annotate(num_users=Count('users'))
- .filter(users=instance.id).exclude(num_users__gt=1)
+ RemoteOrganization.objects.annotate(num_users=Count('users')
+ ).filter(users=instance.id
+ ).exclude(num_users__gt=1)
)
projects.delete()
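The delete_projects_and_organizations hunk reflows, but does not change, the query that finds objects owned solely by the user being deleted: annotate with a user count, filter to the user's memberships, exclude anything shared. A hedged sketch of that shape (assumes Django is installed; the model is passed in rather than imported from readthedocs):

    from django.db.models import Count

    def sole_member_queryset(model, user_id):
        # Objects where user_id is a member and nobody else is:
        # annotate -> filter -> exclude, as in the hunk above.
        return (
            model.objects
            .annotate(num_users=Count('users'))
            .filter(users=user_id)
            .exclude(num_users__gt=1)
        )

    # e.g. sole_member_queryset(Project, user.id).delete(), assuming a Project model.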
diff --git a/readthedocs/core/static.py b/readthedocs/core/static.py
index 89cd883877e..71d433b259c 100644
--- a/readthedocs/core/static.py
+++ b/readthedocs/core/static.py
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
from django.contrib.staticfiles.finders import FileSystemFinder
@@ -15,4 +13,4 @@ class SelectiveFileSystemFinder(FileSystemFinder):
def list(self, ignore_patterns):
ignore_patterns.extend(['epub', 'pdf', 'htmlzip', 'json', 'man'])
- return super(SelectiveFileSystemFinder, self).list(ignore_patterns)
+ return super().list(ignore_patterns)
diff --git a/readthedocs/core/static/core/font/fontawesome-webfont.svg b/readthedocs/core/static/core/font/fontawesome-webfont.svg
index 855c845e538..52c0773359b 100644
--- a/readthedocs/core/static/core/font/fontawesome-webfont.svg
+++ b/readthedocs/core/static/core/font/fontawesome-webfont.svg
@@ -8,7 +8,7 @@ Copyright Dave Gandy 2016. All rights reserved.
[~700 deleted glyph-definition lines from fontawesome-webfont.svg elided; the SVG path data did not survive extraction and carries no reviewable content]
diff --git a/readthedocs/core/symlink.py b/readthedocs/core/symlink.py
index ec110cf6a15..6bd19f5dd28 100644
--- a/readthedocs/core/symlink.py
+++ b/readthedocs/core/symlink.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
A class that manages the symlinks for nginx to serve public files.
@@ -52,19 +54,11 @@
fabric -> rtd-builds/fabric/en/latest/ # single version
"""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
import os
import shutil
from collections import OrderedDict
-from builtins import object
from django.conf import settings
from readthedocs.builds.models import Version
@@ -74,20 +68,23 @@
from readthedocs.projects import constants
from readthedocs.projects.models import Domain
+
log = logging.getLogger(__name__)
-class Symlink(object):
+class Symlink:
"""Base class for symlinking of projects."""
def __init__(self, project):
self.project = project
self.project_root = os.path.join(
- self.WEB_ROOT, project.slug
+ self.WEB_ROOT,
+ project.slug,
)
self.subproject_root = os.path.join(
- self.project_root, 'projects'
+ self.project_root,
+ 'projects',
)
self.environment = LocalEnvironment(project)
self.sanity_check()
@@ -99,9 +96,13 @@ def sanity_check(self):
This will leave it in the proper state for the single_project setting.
"""
if os.path.islink(self.project_root) and not self.project.single_version:
- log.info(constants.LOG_TEMPLATE.format(
- project=self.project.slug, version='',
- msg="Removing single version symlink"))
+ log.info(
+ constants.LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version='',
+ msg='Removing single version symlink',
+ ),
+ )
safe_unlink(self.project_root)
safe_makedirs(self.project_root)
elif (self.project.single_version and
@@ -154,13 +155,15 @@ def symlink_cnames(self, domain=None):
domains = Domain.objects.filter(project=self.project)
for dom in domains:
log_msg = 'Symlinking CNAME: {} -> {}'.format(
- dom.domain, self.project.slug
+ dom.domain,
+ self.project.slug,
)
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
- version='', msg=log_msg
- )
+ version='',
+ msg=log_msg,
+ ),
)
# CNAME to doc root
@@ -169,17 +172,26 @@ def symlink_cnames(self, domain=None):
# Project symlink
project_cname_symlink = os.path.join(
- self.PROJECT_CNAME_ROOT, dom.domain
+ self.PROJECT_CNAME_ROOT,
+ dom.domain,
)
self.environment.run(
- 'ln', '-nsf', self.project.doc_path, project_cname_symlink
+ 'ln',
+ '-nsf',
+ self.project.doc_path,
+ project_cname_symlink,
)
def remove_symlink_cname(self, domain):
"""Remove CNAME symlink."""
- log_msg = "Removing symlink for CNAME {0}".format(domain.domain)
- log.info(constants.LOG_TEMPLATE.format(project=self.project.slug,
- version='', msg=log_msg))
+ log_msg = 'Removing symlink for CNAME {}'.format(domain.domain)
+ log.info(
+ constants.LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version='',
+ msg=log_msg,
+ ),
+ )
symlink = os.path.join(self.CNAME_ROOT, domain.domain)
safe_unlink(symlink)
@@ -187,8 +199,7 @@ def symlink_subprojects(self):
"""
Symlink project subprojects.
- Link from $WEB_ROOT/projects/ ->
- $WEB_ROOT/
+ Link from $WEB_ROOT/projects/ -> $WEB_ROOT/
"""
subprojects = set()
rels = self.get_subprojects()
@@ -205,12 +216,21 @@ def symlink_subprojects(self):
from_to[rel.alias] = rel.child.slug
subprojects.add(rel.alias)
for from_slug, to_slug in list(from_to.items()):
- log_msg = "Symlinking subproject: {0} -> {1}".format(from_slug, to_slug)
- log.info(constants.LOG_TEMPLATE.format(project=self.project.slug,
- version='', msg=log_msg))
+ log_msg = 'Symlinking subproject: {} -> {}'.format(
+ from_slug,
+ to_slug,
+ )
+ log.info(
+ constants.LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version='',
+ msg=log_msg,
+ ),
+ )
symlink = os.path.join(self.subproject_root, from_slug)
docs_dir = os.path.join(
- self.WEB_ROOT, to_slug
+ self.WEB_ROOT,
+ to_slug,
)
symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
if not os.path.lexists(symlink_dir):
@@ -222,7 +242,8 @@ def symlink_subprojects(self):
if result.exit_code > 0:
log.error(
'Could not symlink path: status=%d error=%s',
- result.exit_code, result.error
+ result.exit_code,
+ result.error,
)
# Remove old symlinks
@@ -236,7 +257,7 @@ def symlink_translations(self):
Symlink project translations.
Link from $WEB_ROOT/// ->
- $WEB_ROOT///
+ $WEB_ROOT///
"""
translations = {}
@@ -256,8 +277,9 @@ def symlink_translations(self):
log.info(
constants.LOG_TEMPLATE.format(
project=self.project.slug,
- version='', msg=log_msg
- )
+ version='',
+ msg=log_msg,
+ ),
)
symlink = os.path.join(self.project_root, language)
docs_dir = os.path.join(self.WEB_ROOT, slug, language)
@@ -277,8 +299,9 @@ def symlink_single_version(self):
"""
Symlink project single version.
- Link from $WEB_ROOT/ ->
- HOME/user_builds//rtd-builds/latest/
+ Link from:
+
+ $WEB_ROOT/ -> HOME/user_builds//rtd-builds/latest/
"""
version = self.get_default_version()
@@ -295,7 +318,7 @@ def symlink_single_version(self):
settings.DOCROOT,
self.project.slug,
'rtd-builds',
- version.slug
+ version.slug,
)
self.environment.run('ln', '-nsf', docs_dir, symlink)
@@ -304,11 +327,13 @@ def symlink_versions(self):
Symlink project's versions.
Link from $WEB_ROOT//// ->
- HOME/user_builds//rtd-builds/
+ HOME/user_builds//rtd-builds/
"""
versions = set()
version_dir = os.path.join(
- self.WEB_ROOT, self.project.slug, self.project.language
+ self.WEB_ROOT,
+ self.project.slug,
+ self.project.language,
)
# Include active public versions,
# as well as public versions that are built but not active, for archived versions
@@ -322,15 +347,15 @@ def symlink_versions(self):
constants.LOG_TEMPLATE.format(
project=self.project.slug,
version='',
- msg=log_msg
- )
+ msg=log_msg,
+ ),
)
symlink = os.path.join(version_dir, version.slug)
docs_dir = os.path.join(
settings.DOCROOT,
self.project.slug,
'rtd-builds',
- version.slug
+ version.slug,
)
self.environment.run('ln', '-nsf', docs_dir, symlink)
versions.add(version.slug)
@@ -353,11 +378,18 @@ def get_default_version(self):
class PublicSymlinkBase(Symlink):
CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'public_cname_root')
WEB_ROOT = os.path.join(settings.SITE_ROOT, 'public_web_root')
- PROJECT_CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'public_cname_project')
+ PROJECT_CNAME_ROOT = os.path.join(
+ settings.SITE_ROOT,
+ 'public_cname_project',
+ )
def get_version_queryset(self):
- return (self.project.versions.protected(only_active=False).filter(built=True) |
- self.project.versions.protected(only_active=True))
+ return (
+ self.project.versions.protected(
+ only_active=False,
+ ).filter(built=True) |
+ self.project.versions.protected(only_active=True)
+ )
def get_subprojects(self):
return self.project.subprojects.protected()
@@ -369,11 +401,16 @@ def get_translations(self):
class PrivateSymlinkBase(Symlink):
CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'private_cname_root')
WEB_ROOT = os.path.join(settings.SITE_ROOT, 'private_web_root')
- PROJECT_CNAME_ROOT = os.path.join(settings.SITE_ROOT, 'private_cname_project')
+ PROJECT_CNAME_ROOT = os.path.join(
+ settings.SITE_ROOT,
+ 'private_cname_project',
+ )
def get_version_queryset(self):
- return (self.project.versions.private(only_active=False).filter(built=True) |
- self.project.versions.private(only_active=True))
+ return (
+ self.project.versions.private(only_active=False).filter(built=True) |
+ self.project.versions.private(only_active=True)
+ )
def get_subprojects(self):
return self.project.subprojects.private()
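get_version_queryset in both symlink base classes above ORs two querysets: versions that are built even if inactive, plus every active version at the matching privacy level. A short sketch of that union, assuming the versions manager API exactly as shown in the hunk (protected()/private() and the built flag):

    def versions_to_symlink(project):
        # Built-but-inactive protected versions plus all active protected
        # versions; "|" combines the two querysets into one.
        built = project.versions.protected(only_active=False).filter(built=True)
        active = project.versions.protected(only_active=True)
        return built | active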
diff --git a/readthedocs/core/tasks.py b/readthedocs/core/tasks.py
index 8ed81f1bb1e..7c1b86f477f 100644
--- a/readthedocs/core/tasks.py
+++ b/readthedocs/core/tasks.py
@@ -1,12 +1,13 @@
+# -*- coding: utf-8 -*-
+
"""Basic tasks."""
-from __future__ import absolute_import
import logging
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
-from django.template.loader import get_template
from django.template import TemplateDoesNotExist
+from django.template.loader import get_template
from django.utils import timezone
from messages_extends.models import Message as PersistentMessage
@@ -19,8 +20,10 @@
@app.task(queue='web', time_limit=EMAIL_TIME_LIMIT)
-def send_email_task(recipient, subject, template, template_html,
- context=None, from_email=None, **kwargs):
+def send_email_task(
+ recipient, subject, template, template_html, context=None,
+ from_email=None, **kwargs
+):
"""
Send multipart email.
@@ -44,14 +47,15 @@ def send_email_task(recipient, subject, template, template_html,
"""
msg = EmailMultiAlternatives(
subject,
- get_template(template).render(context),
- from_email or settings.DEFAULT_FROM_EMAIL,
- [recipient],
- **kwargs
+ get_template(template).render(context), from_email or
+ settings.DEFAULT_FROM_EMAIL,
+ [recipient], **kwargs
)
try:
- msg.attach_alternative(get_template(template_html).render(context),
- 'text/html')
+ msg.attach_alternative(
+ get_template(template_html).render(context),
+ 'text/html',
+ )
except TemplateDoesNotExist:
pass
msg.send()
@@ -62,5 +66,7 @@ def send_email_task(recipient, subject, template, template_html,
def clear_persistent_messages():
# Delete all expired message_extend's messages
log.info("Deleting all expired message_extend's messages")
- expired_messages = PersistentMessage.objects.filter(expires__lt=timezone.now())
+ expired_messages = PersistentMessage.objects.filter(
+ expires__lt=timezone.now(),
+ )
expired_messages.delete()
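send_email_task above always renders the plain-text template and attaches the HTML alternative only when that template exists; the reflowed argument list keeps the same call. A hedged sketch of the same fallback, assuming a configured Django project and resolvable template paths (the from address below is a stand-in for DEFAULT_FROM_EMAIL):

    from django.core.mail import EmailMultiAlternatives
    from django.template import TemplateDoesNotExist
    from django.template.loader import get_template

    def send_multipart(recipient, subject, template, template_html, context=None):
        msg = EmailMultiAlternatives(
            subject,
            get_template(template).render(context),
            'no-reply@example.com',  # assumption: stands in for DEFAULT_FROM_EMAIL
            [recipient],
        )
        try:
            # Only attach HTML when the template actually exists.
            msg.attach_alternative(
                get_template(template_html).render(context),
                'text/html',
            )
        except TemplateDoesNotExist:
            pass
        msg.send()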
diff --git a/readthedocs/core/templatetags/core_tags.py b/readthedocs/core/templatetags/core_tags.py
index e91c1fb667e..a94e511acf0 100644
--- a/readthedocs/core/templatetags/core_tags.py
+++ b/readthedocs/core/templatetags/core_tags.py
@@ -1,15 +1,14 @@
-"""Template tags for core app."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Template tags for core app."""
import hashlib
+from urllib.parse import urlencode
-from builtins import str # pylint: disable=redefined-builtin
from django import template
from django.conf import settings
from django.utils.encoding import force_bytes, force_text
from django.utils.safestring import mark_safe
-from future.backports.urllib.parse import urlencode
from readthedocs import __version__
from readthedocs.core.resolver import resolve
@@ -22,23 +21,25 @@
@register.filter
def gravatar(email, size=48):
"""
- Hacked from djangosnippets.org, but basically given an email address
+ Hacked from djangosnippets.org, but basically given an email address.
render an img tag with the hashed up bits needed for leetness
omgwtfstillreading
"""
- url = "http://www.gravatar.com/avatar.php?%s" % urlencode({
+ url = 'http://www.gravatar.com/avatar.php?%s' % urlencode({
'gravatar_id': hashlib.md5(email).hexdigest(),
- 'size': str(size)
+ 'size': str(size),
})
- return ('' % (url, size, size))
+ return (
+ '' % (url, size, size)
+ )
-@register.simple_tag(name="doc_url")
+@register.simple_tag(name='doc_url')
def make_document_url(project, version=None, page=''):
if not project:
- return ""
+ return ''
return resolve(project=project, version_slug=version, filename=page)
@@ -51,7 +52,7 @@ def restructuredtext(value, short=False):
if settings.DEBUG:
raise template.TemplateSyntaxError(
"Error in 'restructuredtext' filter: "
- "The Python docutils library isn't installed."
+ "The Python docutils library isn't installed.",
)
return force_text(value)
else:
@@ -59,20 +60,22 @@ def restructuredtext(value, short=False):
'raw_enabled': False,
'file_insertion_enabled': False,
}
- docutils_settings.update(getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {}))
+ docutils_settings.update(
+ getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {}),
+ )
try:
parts = publish_parts(
source=force_bytes(value),
- writer_name="html4css1",
+ writer_name='html4css1',
settings_overrides=docutils_settings,
)
except ApplicationError:
return force_text(value)
- out = force_text(parts["fragment"])
+ out = force_text(parts['fragment'])
try:
if short:
- out = out.split("\n")[0]
+ out = out.split('\n')[0]
except IndexError:
pass
return mark_safe(out)
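The gravatar filter above hashes the email with md5 and urlencodes the query string. One caveat worth noting: on Python 3, hashlib.md5 requires bytes, and the hunk still passes the value straight through, so callers would need to hand it bytes. A standalone Python 3 sketch that encodes first (the function name and https host are assumptions for illustration):

    import hashlib
    from urllib.parse import urlencode

    def gravatar_url(email, size=48):
        # md5 needs bytes on Python 3, so normalize and encode the address first.
        digest = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
        return 'https://www.gravatar.com/avatar.php?' + urlencode({
            'gravatar_id': digest,
            'size': str(size),
        })

    print(gravatar_url('docs@example.com', size=48))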
diff --git a/readthedocs/core/templatetags/privacy_tags.py b/readthedocs/core/templatetags/privacy_tags.py
index d18778778f6..12d29f6624c 100644
--- a/readthedocs/core/templatetags/privacy_tags.py
+++ b/readthedocs/core/templatetags/privacy_tags.py
@@ -1,6 +1,6 @@
-"""Template tags to query projects by privacy."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Template tags to query projects by privacy."""
from django import template
@@ -18,6 +18,9 @@ def is_admin(user, project):
@register.simple_tag(takes_context=True)
def get_public_projects(context, user):
- projects = Project.objects.for_user_and_viewer(user=user, viewer=context['request'].user)
+ projects = Project.objects.for_user_and_viewer(
+ user=user,
+ viewer=context['request'].user,
+ )
context['public_projects'] = projects
return ''
diff --git a/readthedocs/core/tests/test_signals.py b/readthedocs/core/tests/test_signals.py
index c38705a1f8c..40dccdaa3a7 100644
--- a/readthedocs/core/tests/test_signals.py
+++ b/readthedocs/core/tests/test_signals.py
@@ -1,6 +1,6 @@
-import pytest
+# -*- coding: utf-8 -*-
import django_dynamic_fixture
-
+import pytest
from django.contrib.auth.models import User
from readthedocs.oauth.models import RemoteOrganization
@@ -8,15 +8,13 @@
@pytest.mark.django_db
-class TestProjectOrganizationSignal(object):
+class TestProjectOrganizationSignal:
@pytest.mark.parametrize('model_class', [Project, RemoteOrganization])
def test_project_organization_get_deleted_upon_user_delete(self, model_class):
- """
- If the user has Project or RemoteOrganization where he is the only user,
- upon deleting his account, the Project or RemoteOrganization should also get
- deleted.
- """
+ """If the user has Project or RemoteOrganization where he is the only
+ user, upon deleting his account, the Project or RemoteOrganization
+ should also get deleted."""
obj = django_dynamic_fixture.get(model_class)
user1 = django_dynamic_fixture.get(User)
@@ -33,10 +31,8 @@ def test_project_organization_get_deleted_upon_user_delete(self, model_class):
@pytest.mark.parametrize('model_class', [Project, RemoteOrganization])
def test_multiple_users_project_organization_not_delete(self, model_class):
- """
- Check Project or RemoteOrganization which have multiple users do not get deleted
- when any of the user delete his account.
- """
+ """Check Project or RemoteOrganization which have multiple users do not
+ get deleted when any of the user delete his account."""
obj = django_dynamic_fixture.get(model_class)
user1 = django_dynamic_fixture.get(User)
diff --git a/readthedocs/core/urls/__init__.py b/readthedocs/core/urls/__init__.py
index 48b2e9a614a..60e7fd32325 100644
--- a/readthedocs/core/urls/__init__.py
+++ b/readthedocs/core/urls/__init__.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""URL configuration for core app."""
from __future__ import absolute_import
@@ -8,53 +10,77 @@
from readthedocs.core.views import hooks, serve
from readthedocs.projects.feeds import LatestProjectsFeed, NewProjectsFeed
-
docs_urls = [
- url((r'^docs/(?P{project_slug})/page/'
- r'(?P{filename_slug})$'.format(**pattern_opts)),
+ url(
+ (
+ r'^docs/(?P{project_slug})/page/'
+ r'(?P{filename_slug})$'.format(**pattern_opts)
+ ),
serve.redirect_page_with_filename,
- name='docs_detail'),
-
- url((r'^docs/(?P{project_slug})/'
- r'(?:|projects/(?P{project_slug})/)$'.format(**pattern_opts)),
+ name='docs_detail',
+ ),
+ url(
+ (
+ r'^docs/(?P{project_slug})/'
+ r'(?:|projects/(?P{project_slug})/)$'.format(
+ **pattern_opts
+ )
+ ),
serve.redirect_project_slug,
- name='docs_detail'),
-
- url((r'^docs/(?P{project_slug})/'
- r'(?:|projects/(?P{project_slug})/)'
- r'(?P{lang_slug})/'
- r'(?P{version_slug})/'
- r'(?P{filename_slug})'.format(**pattern_opts)),
+ name='docs_detail',
+ ),
+ url(
+ (
+ r'^docs/(?P{project_slug})/'
+ r'(?:|projects/(?P{project_slug})/)'
+ r'(?P{lang_slug})/'
+ r'(?P{version_slug})/'
+ r'(?P{filename_slug})'.format(**pattern_opts)
+ ),
serve.serve_docs,
- name='docs_detail'),
+ name='docs_detail',
+ ),
]
-
core_urls = [
# Hooks
url(r'^github', hooks.github_build, name='github_build'),
url(r'^gitlab', hooks.gitlab_build, name='gitlab_build'),
url(r'^bitbucket', hooks.bitbucket_build, name='bitbucket_build'),
- url((r'^build/'
- r'(?P{project_slug})'.format(**pattern_opts)),
+ url(
+ (
+ r'^build/'
+ r'(?P{project_slug})'.format(**pattern_opts)
+ ),
hooks.generic_build,
- name='generic_build'),
+ name='generic_build',
+ ),
# Random other stuff
- url(r'^random/(?P{project_slug})'.format(**pattern_opts),
+ url(
+ r'^random/(?P{project_slug})'.format(**pattern_opts),
views.random_page,
- name='random_page'),
+ name='random_page',
+ ),
url(r'^random/$', views.random_page, name='random_page'),
- url((r'^wipe/(?P{project_slug})/'
- r'(?P{version_slug})/$'.format(**pattern_opts)),
+ url(
+ (
+ r'^wipe/(?P{project_slug})/'
+ r'(?P{version_slug})/$'.format(**pattern_opts)
+ ),
views.wipe_version,
- name='wipe_version'),
+ name='wipe_version',
+ ),
]
deprecated_urls = [
- url(r'^feeds/new/$',
+ url(
+ r'^feeds/new/$',
NewProjectsFeed(),
- name="new_feed"),
- url(r'^feeds/latest/$',
+ name='new_feed',
+ ),
+ url(
+ r'^feeds/latest/$',
LatestProjectsFeed(),
- name="latest_feed"),
+ name='latest_feed',
+ ),
]
diff --git a/readthedocs/core/urls/single_version.py b/readthedocs/core/urls/single_version.py
index afc84bda83f..253cae6a319 100644
--- a/readthedocs/core/urls/single_version.py
+++ b/readthedocs/core/urls/single_version.py
@@ -1,47 +1,59 @@
-"""URL configuration for a single version."""
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+"""URL configuration for a single version."""
from functools import reduce
from operator import add
-from django.conf.urls import url
from django.conf import settings
+from django.conf.urls import url
from django.conf.urls.static import static
from readthedocs.constants import pattern_opts
from readthedocs.core.views import serve
+
handler500 = 'readthedocs.core.views.server_error_500'
handler404 = 'readthedocs.core.views.server_error_404'
single_version_urls = [
- url(r'^(?:|projects/(?P{project_slug})/)'
+ url(
+ r'^(?:|projects/(?P{project_slug})/)'
r'page/(?P.*)$'.format(**pattern_opts),
serve.redirect_page_with_filename,
- name='docs_detail'),
-
- url((r'^(?:|projects/(?P{project_slug})/)'
- r'(?P{filename_slug})$'.format(**pattern_opts)),
+ name='docs_detail',
+ ),
+ url(
+ (
+ r'^(?:|projects/(?P{project_slug})/)'
+ r'(?P{filename_slug})$'.format(**pattern_opts)
+ ),
serve.serve_docs,
- name='docs_detail'),
+ name='docs_detail',
+ ),
]
groups = [single_version_urls]
# Needed to serve media locally
if getattr(settings, 'DEBUG', False):
- groups.insert(0, static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
+ groups.insert(
+ 0,
+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT),
+ )
# Allow `/docs/` URL's when not using subdomains or during local dev
if not getattr(settings, 'USE_SUBDOMAIN', False) or settings.DEBUG:
docs_url = [
- url((r'^docs/(?P[-\w]+)/'
- r'(?:|projects/(?P{project_slug})/)'
- r'(?P{filename_slug})$'.format(**pattern_opts)),
+ url(
+ (
+ r'^docs/(?P[-\w]+)/'
+ r'(?:|projects/(?P{project_slug})/)'
+ r'(?P{filename_slug})$'.format(**pattern_opts)
+ ),
serve.serve_docs,
- name='docs_detail')
+ name='docs_detail',
+ ),
]
groups.insert(1, docs_url)
-
urlpatterns = reduce(add, groups)
diff --git a/readthedocs/core/urls/subdomain.py b/readthedocs/core/urls/subdomain.py
index 23f1553245f..4e4a6775e3a 100644
--- a/readthedocs/core/urls/subdomain.py
+++ b/readthedocs/core/urls/subdomain.py
@@ -1,52 +1,63 @@
# -*- coding: utf-8 -*-
"""URL configurations for subdomains."""
-from __future__ import absolute_import
-
from functools import reduce
from operator import add
-from django.conf.urls import url
from django.conf import settings
+from django.conf.urls import url
from django.conf.urls.static import static
+from readthedocs.constants import pattern_opts
+from readthedocs.core.views import server_error_404, server_error_500
from readthedocs.core.views.serve import (
redirect_page_with_filename,
- redirect_project_slug, serve_docs, robots_txt,
-)
-from readthedocs.core.views import (
- server_error_500,
- server_error_404,
+ redirect_project_slug,
+ robots_txt,
+ serve_docs,
)
-from readthedocs.constants import pattern_opts
+
handler500 = server_error_500
handler404 = server_error_404
subdomain_urls = [
url(r'robots.txt$', robots_txt, name='robots_txt'),
-
- url(r'^(?:|projects/(?P{project_slug})/)'
+ url(
+ r'^(?:|projects/(?P{project_slug})/)'
r'page/(?P.*)$'.format(**pattern_opts),
redirect_page_with_filename,
- name='docs_detail'),
-
- url((r'^(?:|projects/(?P{project_slug})/)$').format(**pattern_opts),
+ name='docs_detail',
+ ),
+ url(
+ (r'^(?:|projects/(?P{project_slug})/)$').format(
+ **pattern_opts
+ ),
redirect_project_slug,
- name='redirect_project_slug'),
-
- url((r'^(?:|projects/(?P{project_slug})/)'
- r'(?P{lang_slug})/'
- r'(?P{version_slug})/'
- r'(?P{filename_slug})$'.format(**pattern_opts)),
+ name='redirect_project_slug',
+ ),
+ url(
+ (
+ r'^(?:|projects/(?P{project_slug})/)'
+ r'(?P{lang_slug})/'
+ r'(?P{version_slug})/'
+ r'(?P{filename_slug})$'.format(**pattern_opts)
+ ),
serve_docs,
- name='docs_detail'),
+ name='docs_detail',
+ ),
]
groups = [subdomain_urls]
# Needed to serve media locally
if getattr(settings, 'DEBUG', False):
- groups.insert(0, static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
+ groups.insert(
+ 0,
+ static(
+ settings.MEDIA_URL,
+ document_root=settings.MEDIA_ROOT,
+ ),
+ )
urlpatterns = reduce(add, groups)
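Both URLconf modules above build urlpatterns by flattening a list of pattern groups with reduce(add, groups), optionally inserting the static() group first during local development. The same trick in miniature, with plain lists standing in for the url() pattern lists:

    from functools import reduce
    from operator import add

    groups = [['robots.txt'], ['page/', 'project/'], ['docs/']]
    urlpatterns = reduce(add, groups)  # concatenates the groups in order
    assert urlpatterns == ['robots.txt', 'page/', 'project/', 'docs/']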
diff --git a/readthedocs/core/utils/__init__.py b/readthedocs/core/utils/__init__.py
index 1927c98b74f..2123ce5a326 100644
--- a/readthedocs/core/utils/__init__.py
+++ b/readthedocs/core/utils/__init__.py
@@ -11,7 +11,6 @@
import re
from django.conf import settings
-from django.utils import six
from django.utils.functional import allow_lazy
from django.utils.safestring import SafeText, mark_safe
from django.utils.text import slugify as slugify_base
@@ -20,7 +19,6 @@
from readthedocs.builds.constants import LATEST, BUILD_STATE_TRIGGERED
from readthedocs.doc_builder.constants import DOCKER_LIMITS
-
log = logging.getLogger(__name__)
SYNC_USER = getattr(settings, 'SYNC_USER', getpass.getuser())
@@ -221,7 +219,7 @@ def slugify(value, *args, **kwargs):
return value
-slugify = allow_lazy(slugify, six.text_type, SafeText)
+slugify = allow_lazy(slugify, str, SafeText)
def safe_makedirs(directory_name):
diff --git a/readthedocs/core/utils/extend.py b/readthedocs/core/utils/extend.py
index 567c0c23a4d..a74b1175835 100644
--- a/readthedocs/core/utils/extend.py
+++ b/readthedocs/core/utils/extend.py
@@ -1,11 +1,11 @@
+# -*- coding: utf-8 -*-
+
"""Patterns for extending Read the Docs."""
-from __future__ import absolute_import
import inspect
from django.conf import settings
from django.utils.module_loading import import_string
-import six
def get_override_class(proxy_class, default_class=None):
@@ -21,7 +21,7 @@ def get_override_class(proxy_class, default_class=None):
default_class = getattr(proxy_class, '_default_class')
class_id = '.'.join([
inspect.getmodule(proxy_class).__name__,
- proxy_class.__name__
+ proxy_class.__name__,
])
class_path = getattr(settings, 'CLASS_OVERRIDES', {}).get(class_id)
# pylint: disable=protected-access
@@ -34,14 +34,18 @@ def get_override_class(proxy_class, default_class=None):
class SettingsOverrideMeta(type):
- """Meta class for passing along classmethod class to the underlying class.""" # noqa
+ """
+ Meta class to manage our Setting configurations.
+
+ Meta class for passing along classmethod class to the underlying class.
+ """
def __getattr__(cls, attr): # noqa: pep8 false positive
proxy_class = get_override_class(cls, getattr(cls, '_default_class'))
return getattr(proxy_class, attr)
-class SettingsOverrideObject(six.with_metaclass(SettingsOverrideMeta, object)):
+class SettingsOverrideObject(metaclass=SettingsOverrideMeta):
"""
Base class for creating class that can be overridden.
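SettingsOverrideObject above drops six.with_metaclass in favour of the native Python 3 metaclass keyword. A minimal runnable sketch of that spelling with a stand-in metaclass (the names below are illustrative, not the readthedocs implementation, which also consults CLASS_OVERRIDES):

    class OverrideMeta(type):
        # Stand-in for SettingsOverrideMeta: route attribute lookups that fail
        # on the proxy class to its configured default class.
        def __getattr__(cls, attr):
            return getattr(cls._default_class, attr)

    class _DefaultImpl:
        greeting = 'hello'

    class OverrideObject(metaclass=OverrideMeta):  # Python 3 spelling
        _default_class = _DefaultImpl

    assert OverrideObject.greeting == 'hello'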
diff --git a/readthedocs/core/utils/tasks/__init__.py b/readthedocs/core/utils/tasks/__init__.py
index 344215036f9..7aede3ac1dc 100644
--- a/readthedocs/core/utils/tasks/__init__.py
+++ b/readthedocs/core/utils/tasks/__init__.py
@@ -1,4 +1,6 @@
-"""Common task exports"""
+# -*- coding: utf-8 -*-
+
+"""Common task exports."""
from .permission_checks import user_id_matches # noqa for unused import
from .public import PublicTask # noqa
diff --git a/readthedocs/core/utils/tasks/permission_checks.py b/readthedocs/core/utils/tasks/permission_checks.py
index 1643e866015..84f545830e1 100644
--- a/readthedocs/core/utils/tasks/permission_checks.py
+++ b/readthedocs/core/utils/tasks/permission_checks.py
@@ -1,4 +1,6 @@
-"""Permission checks for tasks"""
+# -*- coding: utf-8 -*-
+
+"""Permission checks for tasks."""
__all__ = ('user_id_matches',)
diff --git a/readthedocs/core/utils/tasks/public.py b/readthedocs/core/utils/tasks/public.py
index 9fb2948ef71..8b8f2421ac7 100644
--- a/readthedocs/core/utils/tasks/public.py
+++ b/readthedocs/core/utils/tasks/public.py
@@ -1,22 +1,19 @@
-"""Celery tasks with publicly viewable status"""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Celery tasks with publicly viewable status."""
from celery import Task, states
from django.conf import settings
from .retrieve import TaskNotFound, get_task_data
+
__all__ = (
- 'PublicTask', 'TaskNoPermission', 'get_public_task_data'
+ 'PublicTask',
+ 'TaskNoPermission',
+ 'get_public_task_data',
)
-
STATUS_UPDATES_ENABLED = not getattr(settings, 'CELERY_ALWAYS_EAGER', False)
@@ -51,7 +48,7 @@ def update_progress_data(self):
def set_permission_context(self, context):
"""
- Set data that can be used by ``check_permission`` to authorize a
+ Set data that can be used by ``check_permission`` to authorize a.
request for the this task. By default it will be the ``kwargs`` passed
into the task.
@@ -109,22 +106,26 @@ def permission_check(check):
def my_public_task(user_id):
pass
"""
+
def decorator(func):
func.check_permission = check
return func
+
return decorator
class TaskNoPermission(Exception):
+
def __init__(self, task_id, *args, **kwargs):
message = 'No permission to access task with id {id}'.format(
- id=task_id)
- super(TaskNoPermission, self).__init__(message, *args, **kwargs)
+ id=task_id,
+ )
+ super().__init__(message, *args, **kwargs)
def get_public_task_data(request, task_id):
"""
- Return task details as tuple
+ Return task details as tuple.
Will raise `TaskNoPermission` if `request` has no permission to access info
of the task with id `task_id`. This is also the case of no task with the
diff --git a/readthedocs/core/utils/tasks/retrieve.py b/readthedocs/core/utils/tasks/retrieve.py
index c96b7823706..9281ad8a1af 100644
--- a/readthedocs/core/utils/tasks/retrieve.py
+++ b/readthedocs/core/utils/tasks/retrieve.py
@@ -1,27 +1,24 @@
-"""Utilities for retrieving task data."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Utilities for retrieving task data."""
from celery import states
from celery.result import AsyncResult
+
__all__ = ('TaskNotFound', 'get_task_data')
class TaskNotFound(Exception):
+
def __init__(self, task_id, *args, **kwargs):
message = 'No public task found with id {id}'.format(id=task_id)
- super(TaskNotFound, self).__init__(message, *args, **kwargs)
+ super().__init__(message, *args, **kwargs)
def get_task_data(task_id):
"""
- Will raise `TaskNotFound` if the task is in state ``PENDING`` or the task
+ Will raise `TaskNotFound` if the task is in state ``PENDING`` or the task.
meta data has no ``'task_name'`` key set.
"""
diff --git a/readthedocs/core/views/__init__.py b/readthedocs/core/views/__init__.py
index f3c67b56b64..fd33e5a9e37 100644
--- a/readthedocs/core/views/__init__.py
+++ b/readthedocs/core/views/__init__.py
@@ -35,7 +35,7 @@ class HomepageView(TemplateView):
def get_context_data(self, **kwargs):
"""Add latest builds and featured projects."""
- context = super(HomepageView, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['featured_list'] = Project.objects.filter(featured=True)
context['projects_count'] = Project.objects.count()
return context
@@ -45,7 +45,7 @@ class SupportView(TemplateView):
template_name = 'support.html'
def get_context_data(self, **kwargs):
- context = super(SupportView, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
support_email = getattr(settings, 'SUPPORT_EMAIL', None)
if not support_email:
support_email = 'support@{domain}'.format(
@@ -133,13 +133,15 @@ def do_not_track(request):
dnt_header = request.META.get('HTTP_DNT')
# https://w3c.github.io/dnt/drafts/tracking-dnt.html#status-representation
- return JsonResponse({ # pylint: disable=redundant-content-type-for-json-response
- 'policy': 'https://docs.readthedocs.io/en/latest/privacy-policy.html',
- 'same-party': [
- 'readthedocs.org',
- 'readthedocs.com',
- 'readthedocs.io', # .org Documentation Sites
- 'readthedocs-hosted.com', # .com Documentation Sites
- ],
- 'tracking': 'N' if dnt_header == '1' else 'T',
- }, content_type='application/tracking-status+json')
+ return JsonResponse( # pylint: disable=redundant-content-type-for-json-response
+ {
+ 'policy': 'https://docs.readthedocs.io/en/latest/privacy-policy.html',
+ 'same-party': [
+ 'readthedocs.org',
+ 'readthedocs.com',
+ 'readthedocs.io', # .org Documentation Sites
+ 'readthedocs-hosted.com', # .com Documentation Sites
+ ],
+ 'tracking': 'N' if dnt_header == '1' else 'T',
+ }, content_type='application/tracking-status+json',
+ )
diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py
index c6d4bc91188..3966ba62b2a 100644
--- a/readthedocs/core/views/hooks.py
+++ b/readthedocs/core/views/hooks.py
@@ -1,11 +1,6 @@
-"""Views pertaining to builds."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Views pertaining to builds."""
import json
import logging
@@ -21,6 +16,7 @@
from readthedocs.projects.models import Feature, Project
from readthedocs.projects.tasks import sync_repository_task
+
log = logging.getLogger(__name__)
@@ -47,13 +43,14 @@ def _build_version(project, slug, already_built=()):
version = project.versions.filter(active=True, slug=slug).first()
if version and slug not in already_built:
log.info(
- "(Version build) Building %s:%s",
- project.slug, version.slug,
+ '(Version build) Building %s:%s',
+ project.slug,
+ version.slug,
)
trigger_build(project=project, version=version, force=True)
return slug
- log.info("(Version build) Not Building %s", slug)
+ log.info('(Version build) Not Building %s', slug)
return None
@@ -70,8 +67,11 @@ def build_branches(project, branch_list):
for branch in branch_list:
versions = project.versions_from_branch_name(branch)
for version in versions:
- log.info("(Branch Build) Processing %s:%s",
- project.slug, version.slug)
+ log.info(
+ '(Branch Build) Processing %s:%s',
+ project.slug,
+ version.slug,
+ )
ret = _build_version(project, version.slug, already_built=to_build)
if ret:
to_build.add(ret)
@@ -95,9 +95,9 @@ def sync_versions(project):
try:
version_identifier = project.get_default_branch()
version = (
- project.versions
- .filter(identifier=version_identifier)
- .first()
+ project.versions.filter(
+ identifier=version_identifier,
+ ).first()
)
if not version:
log.info('Unable to sync from %s version', version_identifier)
@@ -120,10 +120,13 @@ def get_project_from_url(url):
def log_info(project, msg):
- log.info(constants.LOG_TEMPLATE
- .format(project=project,
- version='',
- msg=msg))
+ log.info(
+ constants.LOG_TEMPLATE.format(
+ project=project,
+ version='',
+ msg=msg,
+ ),
+ )
def _build_url(url, projects, branches):
@@ -133,7 +136,7 @@ def _build_url(url, projects, branches):
Check each of the ``branches`` to see if they are active and should be
built.
"""
- ret = ""
+ ret = ''
all_built = {}
all_not_building = {}
@@ -156,15 +159,19 @@ def _build_url(url, projects, branches):
for project_slug, built in list(all_built.items()):
if built:
- msg = '(URL Build) Build Started: %s [%s]' % (
- url, ' '.join(built))
+ msg = '(URL Build) Build Started: {} [{}]'.format(
+ url,
+ ' '.join(built),
+ )
log_info(project_slug, msg=msg)
ret += msg
for project_slug, not_building in list(all_not_building.items()):
if not_building:
- msg = '(URL Build) Not Building: %s [%s]' % (
- url, ' '.join(not_building))
+ msg = '(URL Build) Not Building: {} [{}]'.format(
+ url,
+ ' '.join(not_building),
+ )
log_info(project_slug, msg=msg)
ret += msg
@@ -211,14 +218,14 @@ def github_build(request): # noqa: D205
log.info(
'GitHub webhook search: url=%s branches=%s',
http_search_url,
- branches
+ branches,
)
ssh_projects = get_project_from_url(ssh_search_url)
if ssh_projects:
log.info(
'GitHub webhook search: url=%s branches=%s',
ssh_search_url,
- branches
+ branches,
)
projects = repo_projects | ssh_projects
return _build_url(http_search_url, projects, branches)
@@ -293,24 +300,24 @@ def bitbucket_build(request):
else:
data = json.loads(request.body)
- version = 2 if request.META.get('HTTP_USER_AGENT') == 'Bitbucket-Webhooks/2.0' else 1
+    version = 2 if request.META.get('HTTP_USER_AGENT') == 'Bitbucket-Webhooks/2.0' else 1 # yapf: disable # noqa
if version == 1:
- branches = [commit.get('branch', '')
- for commit in data['commits']]
+ branches = [
+ commit.get('branch', '') for commit in data['commits']
+ ]
repository = data['repository']
if not repository['absolute_url']:
return HttpResponse('Invalid request', status=400)
- search_url = 'bitbucket.org{0}'.format(
- repository['absolute_url'].rstrip('/')
+ search_url = 'bitbucket.org{}'.format(
+ repository['absolute_url'].rstrip('/'),
)
elif version == 2:
changes = data['push']['changes']
- branches = [change['new']['name']
- for change in changes]
+ branches = [change['new']['name'] for change in changes]
if not data['repository']['full_name']:
return HttpResponse('Invalid request', status=400)
- search_url = 'bitbucket.org/{0}'.format(
- data['repository']['full_name']
+ search_url = 'bitbucket.org/{}'.format(
+ data['repository']['full_name'],
)
except (TypeError, ValueError, KeyError):
log.exception('Invalid Bitbucket webhook payload')
@@ -358,10 +365,12 @@ def generic_build(request, project_id_or_slug=None):
project = Project.objects.get(slug=project_id_or_slug)
except (Project.DoesNotExist, ValueError):
log.exception(
- "(Incoming Generic Build) Repo not found: %s",
- project_id_or_slug)
+ '(Incoming Generic Build) Repo not found: %s',
+ project_id_or_slug,
+ )
return HttpResponseNotFound(
- 'Repo not found: %s' % project_id_or_slug)
+ 'Repo not found: %s' % project_id_or_slug,
+ )
# This endpoint doesn't require authorization, we shouldn't allow builds to
# be triggered from this any longer. Deprecation plan is to selectively
# allow access to this endpoint for now.
@@ -370,11 +379,11 @@ def generic_build(request, project_id_or_slug=None):
if request.method == 'POST':
slug = request.POST.get('version_slug', project.default_version)
log.info(
- "(Incoming Generic Build) %s [%s]",
+ '(Incoming Generic Build) %s [%s]',
project.slug,
slug,
)
_build_version(project, slug)
else:
- return HttpResponse("You must POST to this resource.")
+ return HttpResponse('You must POST to this resource.')
return redirect('builds_project_list', project.slug)
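build_branches and _build_version above resolve each pushed branch to project versions, trigger builds for the active ones, and report which slugs were skipped. A framework-free sketch of that bookkeeping (versions_from_branch_name and trigger_build are stand-ins here, not the real project API):

    from types import SimpleNamespace

    def build_branches(project, branch_list, trigger_build):
        built, not_building = set(), set()
        for branch in branch_list:
            for version in project.versions_from_branch_name(branch):
                if version.slug in built:
                    continue
                if version.active:
                    trigger_build(project=project, version=version, force=True)
                    built.add(version.slug)
                else:
                    not_building.add(version.slug)
        return built, not_building

    # Tiny demo with stand-in objects.
    latest = SimpleNamespace(slug='latest', active=True)
    stable = SimpleNamespace(slug='stable', active=False)
    project = SimpleNamespace(versions_from_branch_name=lambda branch: [latest, stable])
    print(build_branches(project, ['master'], lambda **kwargs: None))
    # ({'latest'}, {'stable'})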
diff --git a/readthedocs/core/views/serve.py b/readthedocs/core/views/serve.py
index 2b24b45d95c..a1f65a11ea5 100644
--- a/readthedocs/core/views/serve.py
+++ b/readthedocs/core/views/serve.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+
"""
Doc serving from Python.
@@ -25,18 +26,14 @@
SERVE_DOCS (['private']) - The list of ['private', 'public'] docs to serve.
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import logging
import mimetypes
import os
from functools import wraps
from django.conf import settings
-from django.http import HttpResponse, HttpResponseRedirect, Http404
-from django.shortcuts import get_object_or_404
-from django.shortcuts import render
+from django.http import Http404, HttpResponse, HttpResponseRedirect
+from django.shortcuts import get_object_or_404, render
from django.utils.encoding import iri_to_uri
from django.views.static import serve
@@ -47,6 +44,7 @@
from readthedocs.projects import constants
from readthedocs.projects.models import Project, ProjectRelationship
+
log = logging.getLogger(__name__)
@@ -58,8 +56,11 @@ def map_subproject_slug(view_func):
.. warning:: Does not take into account any kind of privacy settings.
"""
+
@wraps(view_func)
- def inner_view(request, subproject=None, subproject_slug=None, *args, **kwargs): # noqa
+ def inner_view( # noqa
+ request, subproject=None, subproject_slug=None, *args, **kwargs,
+ ):
if subproject is None and subproject_slug:
# Try to fetch by subproject alias first, otherwise we might end up
# redirected to an unrelated project.
@@ -85,8 +86,11 @@ def map_project_slug(view_func):
.. warning:: Does not take into account any kind of privacy settings.
"""
+
@wraps(view_func)
- def inner_view(request, project=None, project_slug=None, *args, **kwargs): # noqa
+ def inner_view( # noqa
+ request, project=None, project_slug=None, *args, **kwargs
+ ):
if project is None:
if not project_slug:
project_slug = request.slug
@@ -111,13 +115,14 @@ def redirect_project_slug(request, project, subproject): # pylint: disable=unus
def redirect_page_with_filename(request, project, subproject, filename): # pylint: disable=unused-argument # noqa
"""Redirect /page/file.html to /en/latest/file.html."""
return HttpResponseRedirect(
- resolve(subproject or project, filename=filename))
+ resolve(subproject or project, filename=filename),
+ )
def _serve_401(request, project):
res = render(request, '401.html')
res.status_code = 401
- log.debug('Unauthorized access to {0} documentation'.format(project.slug))
+ log.debug('Unauthorized access to {} documentation'.format(project.slug))
return res
@@ -129,7 +134,8 @@ def _serve_file(request, filename, basepath):
# Serve from Nginx
content_type, encoding = mimetypes.guess_type(
- os.path.join(basepath, filename))
+ os.path.join(basepath, filename),
+ )
content_type = content_type or 'application/octet-stream'
response = HttpResponse(content_type=content_type)
if encoding:
@@ -155,9 +161,14 @@ def _serve_file(request, filename, basepath):
@map_project_slug
@map_subproject_slug
def serve_docs(
- request, project, subproject, lang_slug=None, version_slug=None,
- filename=''):
- """Exists to map existing proj, lang, version, filename views to the file format."""
+ request,
+ project,
+ subproject,
+ lang_slug=None,
+ version_slug=None,
+ filename='',
+):
+ """Map existing proj, lang, version, filename views to the file format."""
if not version_slug:
version_slug = project.get_default_version()
try:
@@ -222,7 +233,8 @@ def _serve_symlink_docs(request, project, privacy_level, filename=''):
files_tried.append(os.path.join(basepath, filename))
raise Http404(
- 'File not found. Tried these files: %s' % ','.join(files_tried))
+ 'File not found. Tried these files: %s' % ','.join(files_tried),
+ )
@map_project_slug
diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py
index 67df595bd33..201bd039368 100644
--- a/readthedocs/doc_builder/backends/mkdocs.py
+++ b/readthedocs/doc_builder/backends/mkdocs.py
@@ -1,11 +1,10 @@
+# -*- coding: utf-8 -*-
+
"""
MkDocs_ backend for building docs.
.. _MkDocs: http://www.mkdocs.org/
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import json
import logging
import os
@@ -18,6 +17,7 @@
from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError
from readthedocs.projects.models import Feature
+
log = logging.getLogger(__name__)
@@ -44,10 +44,11 @@ class BaseMkdocs(BaseBuilder):
DEFAULT_THEME_NAME = 'mkdocs'
def __init__(self, *args, **kwargs):
- super(BaseMkdocs, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.old_artifact_path = os.path.join(
self.version.project.checkout_path(self.version.slug),
- self.build_dir)
+ self.build_dir,
+ )
self.root_path = self.version.project.checkout_path(self.version.slug)
self.yaml_file = self.get_yaml_config()
@@ -67,14 +68,13 @@ def __init__(self, *args, **kwargs):
else:
self.DEFAULT_THEME_NAME = 'mkdocs'
-
def get_yaml_config(self):
"""Find the ``mkdocs.yml`` file in the project root."""
mkdoc_path = self.config.mkdocs.configuration
if not mkdoc_path:
mkdoc_path = os.path.join(
self.project.checkout_path(self.version.slug),
- 'mkdocs.yml'
+ 'mkdocs.yml',
)
if not os.path.exists(mkdoc_path):
return None
@@ -87,9 +87,7 @@ def load_yaml_config(self):
:raises: ``MkDocsYAMLParseError`` if failed due to syntax errors.
"""
try:
- return yaml.safe_load(
- open(self.yaml_file, 'r')
- )
+ return yaml.safe_load(open(self.yaml_file, 'r'),)
except IOError:
return {
'site_name': self.version.project.name,
@@ -98,10 +96,13 @@ def load_yaml_config(self):
note = ''
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
- note = ' (line %d, column %d)' % (mark.line + 1, mark.column + 1)
+ note = ' (line %d, column %d)' % (
+ mark.line + 1,
+ mark.column + 1,
+ )
raise MkDocsYAMLParseError(
'Your mkdocs.yml could not be loaded, '
- 'possibly due to a syntax error{note}'.format(note=note)
+ 'possibly due to a syntax error{note}'.format(note=note),
)
def append_conf(self, **__):
@@ -153,13 +154,13 @@ def append_conf(self, **__):
# of the mkdocs configuration file.
docs_path = os.path.join(
os.path.dirname(self.yaml_file),
- docs_dir
+ docs_dir,
)
# RTD javascript writing
rtd_data = self.generate_rtd_data(
docs_dir=os.path.relpath(docs_path, self.root_path),
- mkdocs_config=user_config
+ mkdocs_config=user_config,
)
with open(os.path.join(docs_path, 'readthedocs-data.js'), 'w') as f:
f.write(rtd_data)
@@ -178,7 +179,7 @@ def append_conf(self, **__):
# Write the modified mkdocs configuration
yaml.safe_dump(
user_config,
- open(self.yaml_file, 'w')
+ open(self.yaml_file, 'w'),
)
# Write the mkdocs.yml to the build logs
@@ -205,13 +206,21 @@ def generate_rtd_data(self, docs_dir, mkdocs_config):
'programming_language': self.version.project.programming_language,
'page': None,
'theme': self.get_theme_name(mkdocs_config),
- 'builder': "mkdocs",
+ 'builder': 'mkdocs',
'docroot': docs_dir,
- 'source_suffix': ".md",
- 'api_host': getattr(settings, 'PUBLIC_API_URL', 'https://readthedocs.org'),
+ 'source_suffix': '.md',
+ 'api_host': getattr(
+ settings,
+ 'PUBLIC_API_URL',
+ 'https://readthedocs.org',
+ ),
'ad_free': not self.project.show_advertising,
'commit': self.version.project.vcs_repo(self.version.slug).commit,
- 'global_analytics_code': getattr(settings, 'GLOBAL_ANALYTICS_CODE', 'UA-17997319-1'),
+ 'global_analytics_code': getattr(
+ settings,
+ 'GLOBAL_ANALYTICS_CODE',
+ 'UA-17997319-1',
+ ),
'user_analytics_code': analytics_code,
}
data_json = json.dumps(readthedocs_data, indent=4)
@@ -232,21 +241,22 @@ def build(self):
self.python_env.venv_bin(filename='mkdocs'),
self.builder,
'--clean',
- '--site-dir', self.build_dir,
- '--config-file', self.yaml_file,
+ '--site-dir',
+ self.build_dir,
+ '--config-file',
+ self.yaml_file,
]
if self.config.mkdocs.fail_on_warning:
build_command.append('--strict')
cmd_ret = self.run(
- *build_command,
- cwd=checkout_path,
+ *build_command, cwd=checkout_path,
bin_path=self.python_env.venv_bin()
)
return cmd_ret.successful
def get_theme_name(self, mkdocs_config):
"""
- Get the theme configuration in the mkdocs_config
+ Get the theme configuration in the mkdocs_config.
In v0.17.0, the theme configuration switched
from two separate configs (both optional) to a nested directive.
diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py
index 1d280b8a318..c5a664835fa 100644
--- a/readthedocs/doc_builder/backends/sphinx.py
+++ b/readthedocs/doc_builder/backends/sphinx.py
@@ -1,30 +1,27 @@
# -*- coding: utf-8 -*-
+
"""
Sphinx_ backend for building docs.
.. _Sphinx: http://www.sphinx-doc.org/
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import codecs
-import shutil
import logging
import os
+import shutil
import sys
import zipfile
from glob import glob
-import six
from django.conf import settings
from django.template import loader as template_loader
from django.template.loader import render_to_string
from readthedocs.builds import utils as version_utils
from readthedocs.projects.exceptions import ProjectConfigurationError
+from readthedocs.projects.models import Feature
from readthedocs.projects.utils import safe_write
from readthedocs.restapi.client import api
-from readthedocs.projects.models import Feature
from ..base import BaseBuilder, restoring_chdir
from ..constants import PDF_RE
@@ -32,6 +29,7 @@
from ..exceptions import BuildEnvironmentError
from ..signals import finalize_sphinx_context_data
+
log = logging.getLogger(__name__)
@@ -40,14 +38,14 @@ class BaseSphinx(BaseBuilder):
"""The parent for most sphinx builders."""
def __init__(self, *args, **kwargs):
- super(BaseSphinx, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.config_file = self.config.sphinx.configuration
try:
if not self.config_file:
self.config_file = self.project.conf_file(self.version.slug)
self.old_artifact_path = os.path.join(
os.path.dirname(self.config_file),
- self.sphinx_build_dir
+ self.sphinx_build_dir,
)
except ProjectConfigurationError:
docs_dir = self.docs_dir()
@@ -60,11 +58,13 @@ def _write_config(self, master_doc='index'):
"""Create ``conf.py`` if it doesn't exist."""
docs_dir = self.docs_dir()
conf_template = render_to_string(
- 'sphinx/conf.py.conf', {
+ 'sphinx/conf.py.conf',
+ {
'project': self.project,
'version': self.version,
'master_doc': master_doc,
- })
+ },
+ )
conf_file = os.path.join(docs_dir, 'conf.py')
safe_write(conf_file, conf_template)
@@ -76,25 +76,28 @@ def get_config_params(self):
os.path.dirname(
os.path.relpath(
self.config_file,
- self.project.checkout_path(self.version.slug)
- )
+ self.project.checkout_path(self.version.slug),
+ ),
),
'',
)
remote_version = self.version.commit_name
github_user, github_repo = version_utils.get_github_username_repo(
- url=self.project.repo)
+ url=self.project.repo,
+ )
github_version_is_editable = (self.version.type == 'branch')
display_github = github_user is not None
bitbucket_user, bitbucket_repo = version_utils.get_bitbucket_username_repo( # noqa
- url=self.project.repo)
+ url=self.project.repo,
+ )
bitbucket_version_is_editable = (self.version.type == 'branch')
display_bitbucket = bitbucket_user is not None
gitlab_user, gitlab_repo = version_utils.get_gitlab_username_repo(
- url=self.project.repo)
+ url=self.project.repo,
+ )
gitlab_version_is_editable = (self.version.type == 'branch')
display_gitlab = gitlab_user is not None
@@ -146,7 +149,7 @@ def get_config_params(self):
# Features
'dont_overwrite_sphinx_context': self.project.has_feature(
- Feature.DONT_OVERWRITE_SPHINX_CONTEXT
+ Feature.DONT_OVERWRITE_SPHINX_CONTEXT,
),
}
@@ -159,26 +162,25 @@ def get_config_params(self):
return data
def append_conf(self, **__):
- """Find or create a ``conf.py`` with a rendered ``doc_builder/conf.py.tmpl`` appended"""
+ """
+        Find or create a ``conf.py`` and append default content.
+
+ The default content is rendered from ``doc_builder/conf.py.tmpl``.
+ """
if self.config_file is None:
master_doc = self.create_index(extension='rst')
self._write_config(master_doc=master_doc)
try:
self.config_file = (
- self.config_file or
- self.project.conf_file(self.version.slug)
+ self.config_file or self.project.conf_file(self.version.slug)
)
outfile = codecs.open(self.config_file, encoding='utf-8', mode='a')
except (ProjectConfigurationError, IOError):
trace = sys.exc_info()[2]
- six.reraise(
- ProjectConfigurationError,
- ProjectConfigurationError(
- ProjectConfigurationError.NOT_FOUND
- ),
- trace
- )
+ raise ProjectConfigurationError(
+ ProjectConfigurationError.NOT_FOUND,
+ ).with_traceback(trace)
# Append config to project conf file
tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
@@ -222,8 +224,7 @@ def build(self):
self.sphinx_build_dir,
])
cmd_ret = self.run(
- *build_command,
- cwd=os.path.dirname(self.config_file),
+ *build_command, cwd=os.path.dirname(self.config_file),
bin_path=self.python_env.venv_bin()
)
return cmd_ret.successful
@@ -234,18 +235,19 @@ class HtmlBuilder(BaseSphinx):
sphinx_build_dir = '_build/html'
def __init__(self, *args, **kwargs):
- super(HtmlBuilder, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocs'
def move(self, **__):
- super(HtmlBuilder, self).move()
+ super().move()
# Copy JSON artifacts to its own directory
# to keep compatibility with the older builder.
json_path = os.path.abspath(
- os.path.join(self.old_artifact_path, '..', 'json')
+ os.path.join(self.old_artifact_path, '..', 'json'),
)
json_path_target = self.project.artifact_path(
- version=self.version.slug, type_='sphinx_search'
+ version=self.version.slug,
+ type_='sphinx_search',
)
if os.path.exists(json_path):
if os.path.exists(json_path_target):
@@ -253,19 +255,17 @@ def move(self, **__):
log.info('Copying json on the local filesystem')
shutil.copytree(
json_path,
- json_path_target
+ json_path_target,
)
else:
- log.warning(
- 'Not moving json because the build dir is unknown.'
- )
+ log.warning('Not moving json because the build dir is unknown.',)
class HtmlDirBuilder(HtmlBuilder):
type = 'sphinx_htmldir'
def __init__(self, *args, **kwargs):
- super(HtmlDirBuilder, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocsdirhtml'
@@ -273,7 +273,7 @@ class SingleHtmlBuilder(HtmlBuilder):
type = 'sphinx_singlehtml'
def __init__(self, *args, **kwargs):
- super(SingleHtmlBuilder, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocssinglehtml'
@@ -304,7 +304,8 @@ def move(self, **__):
filename=to_write,
arcname=os.path.join(
'{}-{}'.format(self.project.slug, self.version.slug),
- to_write),
+ to_write,
+ ),
)
archive.close()
@@ -338,7 +339,7 @@ class LatexBuildCommand(BuildCommand):
"""Ignore LaTeX exit code if there was file output."""
def run(self):
- super(LatexBuildCommand, self).run()
+ super().run()
# Force LaTeX exit code to be a little more optimistic. If LaTeX
# reports an output file, let's just assume we're fine.
if PDF_RE.search(self.output):
@@ -350,7 +351,7 @@ class DockerLatexBuildCommand(DockerBuildCommand):
"""Ignore LaTeX exit code if there was file output."""
def run(self):
- super(DockerLatexBuildCommand, self).run()
+ super().run()
# Force LaTeX exit code to be a little more optimistic. If LaTeX
# reports an output file, let's just assume we're fine.
if PDF_RE.search(self.output):
@@ -393,11 +394,16 @@ def build(self):
# Run LaTeX -> PDF conversions
pdflatex_cmds = [
['pdflatex', '-interaction=nonstopmode', tex_file]
- for tex_file in tex_files] # yapf: disable
+ for tex_file in tex_files
+ ] # yapf: disable
makeindex_cmds = [
- ['makeindex', '-s', 'python.ist', '{0}.idx'.format(
- os.path.splitext(os.path.relpath(tex_file, latex_cwd))[0])]
- for tex_file in tex_files] # yapf: disable
+ [
+ 'makeindex', '-s', 'python.ist', '{}.idx'.format(
+ os.path.splitext(os.path.relpath(tex_file, latex_cwd))[0],
+ ),
+ ]
+ for tex_file in tex_files
+ ] # yapf: disable
if self.build_env.command_class == DockerBuildCommand:
latex_class = DockerLatexBuildCommand
@@ -406,15 +412,27 @@ def build(self):
pdf_commands = []
for cmd in pdflatex_cmds:
cmd_ret = self.build_env.run_command_class(
- cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
+ cls=latex_class,
+ cmd=cmd,
+ cwd=latex_cwd,
+ warn_only=True,
+ )
pdf_commands.append(cmd_ret)
for cmd in makeindex_cmds:
cmd_ret = self.build_env.run_command_class(
- cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
+ cls=latex_class,
+ cmd=cmd,
+ cwd=latex_cwd,
+ warn_only=True,
+ )
pdf_commands.append(cmd_ret)
for cmd in pdflatex_cmds:
cmd_ret = self.build_env.run_command_class(
- cls=latex_class, cmd=cmd, cwd=latex_cwd, warn_only=True)
+ cls=latex_class,
+ cmd=cmd,
+ cwd=latex_cwd,
+ warn_only=True,
+ )
pdf_match = PDF_RE.search(cmd_ret.output)
if pdf_match:
self.pdf_file_name = pdf_match.group(1).strip()
@@ -448,7 +466,9 @@ def move(self, **__):
from_file = None
if from_file:
to_file = os.path.join(
- self.target, '{}.pdf'.format(self.project.slug))
+ self.target,
+ '{}.pdf'.format(self.project.slug),
+ )
self.run(
'mv',
'-f',
diff --git a/readthedocs/doc_builder/base.py b/readthedocs/doc_builder/base.py
index 83aac0da617..6b143ca8db9 100644
--- a/readthedocs/doc_builder/base.py
+++ b/readthedocs/doc_builder/base.py
@@ -1,15 +1,13 @@
# -*- coding: utf-8 -*-
-"""Base classes for Builders."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Base classes for Builders."""
import logging
import os
import shutil
-from builtins import object
from functools import wraps
+
log = logging.getLogger(__name__)
@@ -26,7 +24,7 @@ def decorator(*args, **kw):
return decorator
-class BaseBuilder(object):
+class BaseBuilder:
"""
The Base for all Builders. Defines the API for subclasses.
@@ -49,7 +47,9 @@ def __init__(self, build_env, python_env, force=False):
self.config = python_env.config if python_env else None
self._force = force
self.target = self.project.artifact_path(
- version=self.version.slug, type_=self.type)
+ version=self.version.slug,
+ type_=self.type,
+ )
def force(self, **__):
"""An optional step to force a build even when nothing has changed."""
@@ -70,7 +70,7 @@ def move(self, **__):
shutil.copytree(
self.old_artifact_path,
self.target,
- ignore=shutil.ignore_patterns(*self.ignore_patterns)
+ ignore=shutil.ignore_patterns(*self.ignore_patterns),
)
else:
log.warning('Not moving docs, because the build dir is unknown.')
@@ -99,10 +99,14 @@ def create_index(self, extension='md', **__):
docs_dir = self.docs_dir()
index_filename = os.path.join(
- docs_dir, 'index.{ext}'.format(ext=extension))
+ docs_dir,
+ 'index.{ext}'.format(ext=extension),
+ )
if not os.path.exists(index_filename):
readme_filename = os.path.join(
- docs_dir, 'README.{ext}'.format(ext=extension))
+ docs_dir,
+ 'README.{ext}'.format(ext=extension),
+ )
if os.path.exists(readme_filename):
return 'README'
diff --git a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py
index 8b7e0181c56..9cf7f9d8abb 100644
--- a/readthedocs/doc_builder/config.py
+++ b/readthedocs/doc_builder/config.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""An API to load config from a readthedocs.yml file."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""An API to load config from a readthedocs.yml file."""
from os import path
@@ -33,7 +31,7 @@ def load_yaml_config(version):
try:
sphinx_configuration = path.join(
version.get_conf_py_path(),
- 'conf.py'
+ 'conf.py',
)
except ProjectConfigurationError:
sphinx_configuration = None
@@ -52,7 +50,7 @@ def load_yaml_config(version):
'sphinx_configuration': sphinx_configuration,
'build_image': project.container_image,
'doctype': project.documentation_type,
- }
+ },
}
img_settings = DOCKER_IMAGE_SETTINGS.get(img_name, None)
if img_settings:
diff --git a/readthedocs/doc_builder/constants.py b/readthedocs/doc_builder/constants.py
index 1cd1a5b1348..4f6deeb6174 100644
--- a/readthedocs/doc_builder/constants.py
+++ b/readthedocs/doc_builder/constants.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""Doc build constants."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Doc build constants."""
import logging
import os
@@ -10,6 +8,7 @@
from django.conf import settings
+
log = logging.getLogger(__name__)
MKDOCS_TEMPLATE_DIR = os.path.join(
@@ -33,7 +32,9 @@
old_config = getattr(settings, 'DOCKER_BUILD_IMAGES', None)
if old_config:
- log.warning('Old config detected, DOCKER_BUILD_IMAGES->DOCKER_IMAGE_SETTINGS')
+ log.warning(
+ 'Old config detected, DOCKER_BUILD_IMAGES->DOCKER_IMAGE_SETTINGS',
+ )
DOCKER_IMAGE_SETTINGS.update(old_config)
DOCKER_LIMITS = {'memory': '200m', 'time': 600}
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py
index 36fcdd2f48c..ddf0b471d7d 100644
--- a/readthedocs/doc_builder/environments.py
+++ b/readthedocs/doc_builder/environments.py
@@ -2,13 +2,6 @@
"""Documentation Builder Environments."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
import os
import re
@@ -18,8 +11,6 @@
import traceback
from datetime import datetime
-import six
-from builtins import object, str
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from docker import APIClient
@@ -94,9 +85,19 @@ class BuildCommand(BuildCommandResultMixin):
:param description: a more grokable description of the command being run
"""
- def __init__(self, command, cwd=None, shell=False, environment=None,
- combine_output=True, input_data=None, build_env=None,
- bin_path=None, description=None, record_as_success=False):
+ def __init__(
+ self,
+ command,
+ cwd=None,
+ shell=False,
+ environment=None,
+ combine_output=True,
+ input_data=None,
+ build_env=None,
+ bin_path=None,
+ description=None,
+ record_as_success=False,
+ ):
self.command = command
self.shell = shell
if cwd is None:
@@ -124,7 +125,7 @@ def __init__(self, command, cwd=None, shell=False, environment=None,
def __str__(self):
# TODO do we want to expose the full command here?
- output = u''
+ output = ''
if self.output is not None:
output = self.output.encode('utf-8')
return '\n'.join([self.get_command(), output])
@@ -179,7 +180,7 @@ def run(self):
if self.input_data is not None:
cmd_input = self.input_data
- if isinstance(cmd_input, six.string_types):
+ if isinstance(cmd_input, str):
cmd_input_bytes = cmd_input.encode('utf-8')
else:
cmd_input_bytes = cmd_input
@@ -311,11 +312,18 @@ def run(self):
             # nicer. Sometimes the kernel kills the command and Docker does
             # not use the specific exit code, so we check if the word `Killed`
# is in the last 15 lines of the command's output
- killed_in_output = 'Killed' in '\n'.join(self.output.splitlines()[-15:])
- if self.exit_code == DOCKER_OOM_EXIT_CODE or (self.exit_code == 1 and killed_in_output):
- self.output += str(_(
- '\n\nCommand killed due to excessive memory consumption\n'
- ))
+ killed_in_output = 'Killed' in '\n'.join(
+ self.output.splitlines()[-15:],
+ )
+ if self.exit_code == DOCKER_OOM_EXIT_CODE or (
+ self.exit_code == 1 and
+ killed_in_output
+ ):
+ self.output += str(
+ _(
+ '\n\nCommand killed due to excessive memory consumption\n',
+ ),
+ )
except DockerAPIError:
self.exit_code = -1
if self.output is None or not self.output:
@@ -333,20 +341,28 @@ def get_wrapped_command(self):
install requests<0.8``. This escapes a good majority of those
characters.
"""
- bash_escape_re = re.compile(r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@"
- r"\[\\\]\^\`\{\|\}\~])")
+ bash_escape_re = re.compile(
+ r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@"
+ r'\[\\\]\^\`\{\|\}\~])',
+ )
prefix = ''
if self.bin_path:
- prefix += 'PATH={0}:$PATH '.format(self.bin_path)
- return ("/bin/sh -c 'cd {cwd} && {prefix}{cmd}'"
- .format(
- cwd=self.cwd,
- prefix=prefix,
- cmd=(' '.join([bash_escape_re.sub(r'\\\1', part)
- for part in self.command]))))
+ prefix += 'PATH={}:$PATH '.format(self.bin_path)
+ return (
+ "/bin/sh -c 'cd {cwd} && {prefix}{cmd}'".format(
+ cwd=self.cwd,
+ prefix=prefix,
+ cmd=(
+ ' '.join([
+ bash_escape_re.sub(r'\\\1', part)
+ for part in self.command
+ ])
+ ),
+ )
+ )
-class BaseEnvironment(object):
+class BaseEnvironment:
"""
Base environment class.
@@ -369,7 +385,8 @@ def run(self, *cmd, **kwargs):
def run_command_class(
self, cls, cmd, record=None, warn_only=False,
- record_as_success=False, **kwargs):
+ record_as_success=False, **kwargs
+ ):
"""
Run command from this environment.
@@ -419,17 +436,19 @@ def run_command_class(
self.commands.append(build_cmd)
if build_cmd.failed:
- msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command())
+ msg = 'Command {cmd} failed'.format(cmd=build_cmd.get_command())
if build_cmd.output:
- msg += u':\n{out}'.format(out=build_cmd.output)
+ msg += ':\n{out}'.format(out=build_cmd.output)
if warn_only:
- log.warning(LOG_TEMPLATE.format(
- project=self.project.slug,
- version='latest',
- msg=msg,
- ))
+ log.warning(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version='latest',
+ msg=msg,
+ ),
+ )
else:
raise BuildEnvironmentWarning(msg)
return build_cmd
@@ -485,9 +504,17 @@ class BuildEnvironment(BaseEnvironment):
MkDocsYAMLParseError,
)
- def __init__(self, project=None, version=None, build=None, config=None,
- record=True, environment=None, update_on_success=True):
- super(BuildEnvironment, self).__init__(project, environment)
+ def __init__(
+ self,
+ project=None,
+ version=None,
+ build=None,
+ config=None,
+ record=True,
+ environment=None,
+ update_on_success=True,
+ ):
+ super().__init__(project, environment)
self.version = version
self.build = build
self.config = config
@@ -563,33 +590,39 @@ def run(self, *cmd, **kwargs):
kwargs.update({
'build_env': self,
})
- return super(BuildEnvironment, self).run(*cmd, **kwargs)
+ return super().run(*cmd, **kwargs)
def run_command_class(self, *cmd, **kwargs): # pylint: disable=arguments-differ
kwargs.update({
'build_env': self,
})
- return super(BuildEnvironment, self).run_command_class(*cmd, **kwargs)
+ return super().run_command_class(*cmd, **kwargs)
@property
def successful(self):
- """Is build completed, without top level failures or failing commands.""" # noqa
- return (self.done and self.failure is None and
- all(cmd.successful for cmd in self.commands))
+ """Build completed, without top level failures or failing commands."""
+ return (
+ self.done and self.failure is None and
+ all(cmd.successful for cmd in self.commands)
+ )
@property
def failed(self):
"""Is build completed, but has top level failure or failing commands."""
- return (self.done and (
- self.failure is not None or
- any(cmd.failed for cmd in self.commands)
- ))
+ return (
+ self.done and (
+ self.failure is not None or
+ any(cmd.failed for cmd in self.commands)
+ )
+ )
@property
def done(self):
"""Is build in finished state."""
- return (self.build is not None and
- self.build['state'] == BUILD_STATE_FINISHED)
+ return (
+ self.build is not None and
+ self.build['state'] == BUILD_STATE_FINISHED
+ )
def update_build(self, state=None):
"""
@@ -635,13 +668,15 @@ def update_build(self, state=None):
if self.failure is not None:
# Surface a generic error if the class is not a
# BuildEnvironmentError
+ # yapf: disable
if not isinstance(
- self.failure,
- (
- BuildEnvironmentException,
- BuildEnvironmentWarning,
- ),
+ self.failure,
+ (
+ BuildEnvironmentException,
+ BuildEnvironmentWarning,
+ ),
):
+ # yapf: enable
log.error(
'Build failed with unhandled exception: %s',
str(self.failure),
@@ -663,7 +698,7 @@ def update_build(self, state=None):
# Attempt to stop unicode errors on build reporting
for key, val in list(self.build.items()):
- if isinstance(val, six.binary_type):
+ if isinstance(val, bytes):
self.build[key] = val.decode('utf-8', 'ignore')
# We are selective about when we update the build object here
@@ -678,7 +713,7 @@ def update_build(self, state=None):
if update_build:
try:
api_v2.build(self.build['id']).put(self.build)
- except HttpClientError as e:
+ except HttpClientError:
log.exception(
'Unable to update build: id=%d',
self.build['id'],
@@ -717,7 +752,7 @@ class DockerBuildEnvironment(BuildEnvironment):
def __init__(self, *args, **kwargs):
self.docker_socket = kwargs.pop('docker_socket', DOCKER_SOCKET)
- super(DockerBuildEnvironment, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.client = None
self.container = None
self.container_name = slugify(
@@ -769,10 +804,11 @@ def __enter__(self):
project=self.project.slug,
version=self.version.slug,
msg=(
- 'Removing stale container {0}'
- .format(self.container_id)
+ 'Removing stale container {}'.format(
+ self.container_id,
+ )
),
- )
+ ),
)
client = self.get_client()
client.remove_container(self.container_id)
@@ -833,7 +869,7 @@ def __exit__(self, exc_type, exc_value, tb):
if not all([exc_type, exc_value, tb]):
exc_type, exc_value, tb = sys.exc_info()
- return super(DockerBuildEnvironment, self).__exit__(exc_type, exc_value, tb)
+ return super().__exit__(exc_type, exc_value, tb)
def get_client(self):
"""Create Docker client connection."""
@@ -844,7 +880,7 @@ def get_client(self):
version=DOCKER_VERSION,
)
return self.client
- except DockerException as e:
+ except DockerException:
log.exception(
LOG_TEMPLATE.format(
project=self.project.slug,
@@ -942,10 +978,10 @@ def update_build_from_container_state(self):
)
elif state.get('Error'):
self.failure = BuildEnvironmentError((
- _('Build exited due to unknown error: {0}')
- .format(state.get('Error'))
- ),
- )
+ _('Build exited due to unknown error: {0}').format(
+ state.get('Error'),
+ )
+ ),)
def create_container(self):
"""Create docker container."""
@@ -970,7 +1006,7 @@ def create_container(self):
environment=self.environment,
)
client.start(container=self.container_id)
- except ConnectionError as e:
+ except ConnectionError:
log.exception(
LOG_TEMPLATE.format(
project=self.project.slug,
diff --git a/readthedocs/doc_builder/exceptions.py b/readthedocs/doc_builder/exceptions.py
index 362b18015fe..eaa2858aa6e 100644
--- a/readthedocs/doc_builder/exceptions.py
+++ b/readthedocs/doc_builder/exceptions.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
-"""Exceptions raised when building documentation."""
-from __future__ import division, print_function, unicode_literals
+"""Exceptions raised when building documentation."""
from django.utils.translation import ugettext_noop
@@ -11,9 +10,12 @@ class BuildEnvironmentException(Exception):
status_code = None
def __init__(self, message=None, **kwargs):
- self.status_code = kwargs.pop('status_code', None) or self.status_code or 1
+ self.status_code = kwargs.pop(
+ 'status_code',
+ None,
+ ) or self.status_code or 1
message = message or self.get_default_message()
- super(BuildEnvironmentException, self).__init__(message, **kwargs)
+ super().__init__(message, **kwargs)
def get_default_message(self):
return self.message
diff --git a/readthedocs/doc_builder/loader.py b/readthedocs/doc_builder/loader.py
index 0edcaace778..016cc6bba9a 100644
--- a/readthedocs/doc_builder/loader.py
+++ b/readthedocs/doc_builder/loader.py
@@ -1,16 +1,26 @@
+# -*- coding: utf-8 -*-
+
"""Lookup tables for builders and backends."""
-from __future__ import absolute_import
from importlib import import_module
from django.conf import settings
+
# Managers
mkdocs = import_module(
- getattr(settings, 'MKDOCS_BACKEND',
- 'readthedocs.doc_builder.backends.mkdocs'))
+ getattr(
+ settings,
+ 'MKDOCS_BACKEND',
+ 'readthedocs.doc_builder.backends.mkdocs',
+ ),
+)
sphinx = import_module(
- getattr(settings, 'SPHINX_BACKEND',
- 'readthedocs.doc_builder.backends.sphinx'))
+ getattr(
+ settings,
+ 'SPHINX_BACKEND',
+ 'readthedocs.doc_builder.backends.sphinx',
+ ),
+)
BUILDER_BY_NAME = {
# Possible HTML Builders
diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py
index 9a2302c41ab..69628d19c5e 100644
--- a/readthedocs/doc_builder/python_environments.py
+++ b/readthedocs/doc_builder/python_environments.py
@@ -1,12 +1,6 @@
# -*- coding: utf-8 -*-
-"""An abstraction over virtualenv and Conda environments."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""An abstraction over virtualenv and Conda environments."""
import copy
import itertools
@@ -15,8 +9,6 @@
import os
import shutil
-import six
-from builtins import object, open
from django.conf import settings
from readthedocs.doc_builder.config import load_yaml_config
@@ -26,10 +18,11 @@
from readthedocs.projects.constants import LOG_TEMPLATE
from readthedocs.projects.models import Feature
+
log = logging.getLogger(__name__)
-class PythonEnvironment(object):
+class PythonEnvironment:
"""An isolated environment into which Python packages can be installed."""
@@ -48,24 +41,29 @@ def delete_existing_build_dir(self):
# Handle deleting old build dir
build_dir = os.path.join(
self.venv_path(),
- 'build')
+ 'build',
+ )
if os.path.exists(build_dir):
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg='Removing existing build directory',
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg='Removing existing build directory',
+ ),
+ )
shutil.rmtree(build_dir)
def delete_existing_venv_dir(self):
venv_dir = self.venv_path()
# Handle deleting old venv dir
if os.path.exists(venv_dir):
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg='Removing existing venv directory',
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg='Removing existing venv directory',
+ ),
+ )
shutil.rmtree(venv_dir)
def install_package(self):
@@ -73,8 +71,8 @@ def install_package(self):
getattr(settings, 'USE_PIP_INSTALL', False)):
extra_req_param = ''
if self.config.python.extra_requirements:
- extra_req_param = '[{0}]'.format(
- ','.join(self.config.python.extra_requirements)
+ extra_req_param = '[{}]'.format(
+ ','.join(self.config.python.extra_requirements),
)
self.build_env.run(
self.venv_bin(filename='python'),
@@ -84,7 +82,7 @@ def install_package(self):
'--ignore-installed',
'--cache-dir',
self.project.pip_cache_path,
- '.{0}'.format(extra_req_param),
+ '.{}'.format(extra_req_param),
cwd=self.checkout_path,
bin_path=self.venv_bin(),
)
@@ -143,7 +141,9 @@ def is_obsolete(self):
with open(self.environment_json_path(), 'r') as fpath:
environment_conf = json.load(fpath)
except (IOError, TypeError, KeyError, ValueError):
- log.warning('Unable to read/parse readthedocs-environment.json file')
+ log.warning(
+ 'Unable to read/parse readthedocs-environment.json file',
+ )
# We remove the JSON file here to avoid cycling over time with a
# corrupted file.
os.remove(self.environment_json_path())
@@ -177,7 +177,15 @@ def is_obsolete(self):
])
def save_environment_json(self):
- """Save on disk Python and build image versions used to create the venv."""
+ """
+    Save data about the environment used to build docs on the builder's disk.
+
+    The data is saved as a ``.json`` file with the following information:
+
+ - python.version
+ - build.image
+ - build.hash
+ """
data = {
'python': {
'version': self.config.python_full_version,
@@ -196,7 +204,7 @@ def save_environment_json(self):
with open(self.environment_json_path(), 'w') as fpath:
# Compatibility for Py2 and Py3. ``io.TextIOWrapper`` expects
# unicode but ``json.dumps`` returns str in Py2.
- fpath.write(six.text_type(json.dumps(data)))
+ fpath.write(str(json.dumps(data)))
class Virtualenv(PythonEnvironment):
@@ -243,9 +251,7 @@ def install_core_requirements(self):
# so it is used when installing the other requirements.
cmd = pip_install_cmd + ['pip']
self.build_env.run(
- *cmd,
- bin_path=self.venv_bin(),
- cwd=self.checkout_path
+ *cmd, bin_path=self.venv_bin(), cwd=self.checkout_path
)
requirements = [
@@ -277,7 +283,7 @@ def install_core_requirements(self):
negative='sphinx<1.8',
),
'sphinx-rtd-theme<0.5',
- 'readthedocs-sphinx-ext<0.6'
+ 'readthedocs-sphinx-ext<0.6',
])
cmd = copy.copy(pip_install_cmd)
@@ -298,8 +304,12 @@ def install_user_requirements(self):
requirements_file_path = self.config.python.requirements
if not requirements_file_path and requirements_file_path != '':
builder_class = get_builder_class(self.config.doctype)
- docs_dir = (builder_class(build_env=self.build_env, python_env=self)
- .docs_dir())
+ docs_dir = (
+ builder_class(
+ build_env=self.build_env,
+ python_env=self,
+ ).docs_dir()
+ )
paths = [docs_dir, '']
req_files = ['pip_requirements.txt', 'requirements.txt']
for path, req_file in itertools.product(paths, req_files):
@@ -348,11 +358,13 @@ def setup_base(self):
if os.path.exists(version_path):
# Re-create conda directory each time to keep fresh state
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg='Removing existing conda directory',
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg='Removing existing conda directory',
+ ),
+ )
shutil.rmtree(version_path)
self.build_env.run(
'conda',
diff --git a/readthedocs/doc_builder/signals.py b/readthedocs/doc_builder/signals.py
index 419531a5630..5821ddeccf2 100644
--- a/readthedocs/doc_builder/signals.py
+++ b/readthedocs/doc_builder/signals.py
@@ -1,9 +1,10 @@
-"""Signals for adding custom context data"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Signals for adding custom context data."""
import django.dispatch
+
finalize_sphinx_context_data = django.dispatch.Signal(
- providing_args=['buildenv', 'context', 'response_data']
+ providing_args=['buildenv', 'context', 'response_data'],
)
diff --git a/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl b/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl
index e36cdd16af5..0ec2b9c331b 100644
--- a/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl
+++ b/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl
@@ -13,6 +13,7 @@
# https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl
#
+from __future__ import absolute_import, division, print_function, unicode_literals
import importlib
import sys
@@ -81,9 +82,9 @@ context = {
("{{ slug }}", "{{ url }}"),{% endfor %}
],
'slug': '{{ project.slug }}',
- 'name': u'{{ project.name }}',
- 'rtd_language': u'{{ project.language }}',
- 'programming_language': u'{{ project.programming_language }}',
+ 'name': '{{ project.name }}',
+ 'rtd_language': '{{ project.language }}',
+ 'programming_language': '{{ project.programming_language }}',
'canonical_url': '{{ project.get_canonical_url }}',
'analytics_code': '{{ project.analytics_code }}',
'single_version': {{ project.single_version }},
diff --git a/readthedocs/doc_builder/templates/doc_builder/data.js.tmpl b/readthedocs/doc_builder/templates/doc_builder/data.js.tmpl
index 4dda93914e8..29ab61b0e65 100644
--- a/readthedocs/doc_builder/templates/doc_builder/data.js.tmpl
+++ b/readthedocs/doc_builder/templates/doc_builder/data.js.tmpl
@@ -6,7 +6,7 @@ var doc_slug = "{{ slug }}";
var page_name = "{{ pagename }}";
var html_theme = "{{ html_theme }}";
-// mkdocs_page_input_path is only defined on the RTD mkdocs theme but it isn't
+// mkdocs_page_input_path is only defined on the RTD mkdocs theme but it isn't
// available on all pages (e.g. missing in search result)
if (typeof mkdocs_page_input_path !== "undefined") {
READTHEDOCS_DATA["page"] = mkdocs_page_input_path.substr(
diff --git a/readthedocs/gold/__init__.py b/readthedocs/gold/__init__.py
index b26c8ed7c84..2ef7166adf3 100644
--- a/readthedocs/gold/__init__.py
+++ b/readthedocs/gold/__init__.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
A Django app for Gold Membership.
diff --git a/readthedocs/gold/admin.py b/readthedocs/gold/admin.py
index ecd512b75a8..2da5d033444 100644
--- a/readthedocs/gold/admin.py
+++ b/readthedocs/gold/admin.py
@@ -1,7 +1,9 @@
+# -*- coding: utf-8 -*-
+
"""Django admin configuration for the Gold Membership app."""
-from __future__ import absolute_import
from django.contrib import admin
+
from .models import GoldUser
diff --git a/readthedocs/gold/apps.py b/readthedocs/gold/apps.py
index eda30768d46..c54b9d5424e 100644
--- a/readthedocs/gold/apps.py
+++ b/readthedocs/gold/apps.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
+
"""Django app configuration for the Gold Membership app."""
-from __future__ import absolute_import
from django.apps import AppConfig
diff --git a/readthedocs/gold/forms.py b/readthedocs/gold/forms.py
index 949ab9c4c61..84c19fba387 100644
--- a/readthedocs/gold/forms.py
+++ b/readthedocs/gold/forms.py
@@ -1,10 +1,8 @@
-"""Gold subscription forms."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Gold subscription forms."""
-from builtins import object
from django import forms
-
from django.utils.translation import ugettext_lazy as _
from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin
@@ -24,7 +22,7 @@ class GoldSubscriptionForm(StripeResourceMixin, StripeModelForm):
:py:class:`StripeResourceMixin` for common operations against the Stripe API.
"""
- class Meta(object):
+ class Meta:
model = GoldUser
fields = ['last_4_card_digits', 'level']
@@ -32,9 +30,11 @@ class Meta(object):
required=True,
min_length=4,
max_length=4,
- widget=forms.HiddenInput(attrs={
- 'data-bind': 'valueInit: last_4_card_digits, value: last_4_card_digits',
- })
+ widget=forms.HiddenInput(
+ attrs={
+ 'data-bind': 'valueInit: last_4_card_digits, value: last_4_card_digits',
+ },
+ ),
)
level = forms.ChoiceField(
@@ -44,7 +44,7 @@ class Meta(object):
def clean(self):
self.instance.user = self.customer
- return super(GoldSubscriptionForm, self).clean()
+ return super().clean()
def validate_stripe(self):
subscription = self.get_subscription()
@@ -54,7 +54,8 @@ def validate_stripe(self):
def get_customer_kwargs(self):
data = {
- 'description': self.customer.get_full_name() or self.customer.username,
+ 'description': self.customer.get_full_name() or
+ self.customer.username,
'email': self.customer.email,
'id': self.instance.stripe_id or None,
}
@@ -82,7 +83,7 @@ def get_subscription(self):
# Add a new subscription
subscription = customer.subscriptions.create(
plan=self.cleaned_data['level'],
- source=self.cleaned_data['stripe_token']
+ source=self.cleaned_data['stripe_token'],
)
return subscription
@@ -91,13 +92,13 @@ def get_subscription(self):
class GoldProjectForm(forms.Form):
project = forms.ChoiceField(
required=True,
- help_text='Select a project.'
+ help_text='Select a project.',
)
def __init__(self, active_user, *args, **kwargs):
self.user = kwargs.pop('user', None)
self.projects = kwargs.pop('projects', None)
- super(GoldProjectForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.fields['project'].choices = self.generate_choices(active_user)
def generate_choices(self, active_user):
@@ -114,8 +115,11 @@ def clean_project(self):
return project_slug
def clean(self):
- cleaned_data = super(GoldProjectForm, self).clean()
+ cleaned_data = super().clean()
if self.projects.count() < self.user.num_supported_projects:
return cleaned_data
- self.add_error(None, 'You already have the max number of supported projects.')
+ self.add_error(
+ None,
+ 'You already have the max number of supported projects.',
+ )
diff --git a/readthedocs/gold/migrations/0001_initial.py b/readthedocs/gold/migrations/0001_initial.py
index baeb6d28361..80a61461da8 100644
--- a/readthedocs/gold/migrations/0001_initial.py
+++ b/readthedocs/gold/migrations/0001_initial.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/gold/migrations/0002_rename_last_4_digits.py b/readthedocs/gold/migrations/0002_rename_last_4_digits.py
index 2ed345fd3ce..478681e4ad3 100644
--- a/readthedocs/gold/migrations/0002_rename_last_4_digits.py
+++ b/readthedocs/gold/migrations/0002_rename_last_4_digits.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-07-16 15:45
-from __future__ import unicode_literals
-
-from django.db import migrations, models
+from django.db import migrations
class Migration(migrations.Migration):
diff --git a/readthedocs/gold/migrations/0003_add_missing_model_change_migrations.py b/readthedocs/gold/migrations/0003_add_missing_model_change_migrations.py
index 2e919ac202e..f1f9f1dbd5f 100644
--- a/readthedocs/gold/migrations/0003_add_missing_model_change_migrations.py
+++ b/readthedocs/gold/migrations/0003_add_missing_model_change_migrations.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-31 11:25
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/gold/migrations/0004_add_vat_id.py b/readthedocs/gold/migrations/0004_add_vat_id.py
index eab1771f1a2..ee4899eebac 100644
--- a/readthedocs/gold/migrations/0004_add_vat_id.py
+++ b/readthedocs/gold/migrations/0004_add_vat_id.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-10-22 07:13
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/gold/models.py b/readthedocs/gold/models.py
index b87e0f72345..c73ec79e39d 100644
--- a/readthedocs/gold/models.py
+++ b/readthedocs/gold/models.py
@@ -1,13 +1,6 @@
# -*- coding: utf-8 -*-
"""Django models for recurring donations aka Gold Membership."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import math
from django.db import models
@@ -16,6 +9,7 @@
from readthedocs.projects.models import Project
+
#: The membership options that are currently available
LEVEL_CHOICES = (
('v1-org-5', '$5/mo'),
@@ -62,7 +56,7 @@ class GoldUser(models.Model):
business_vat_id = models.CharField(max_length=128, null=True, blank=True)
def __str__(self):
- return 'Gold Level %s for %s' % (self.level, self.user)
+ return 'Gold Level {} for {}'.format(self.level, self.user)
@property
def num_supported_projects(self):
diff --git a/readthedocs/gold/signals.py b/readthedocs/gold/signals.py
index a3cef14ca9e..a5b377290be 100644
--- a/readthedocs/gold/signals.py
+++ b/readthedocs/gold/signals.py
@@ -1,6 +1,7 @@
-"""Gold model signals"""
+# -*- coding: utf-8 -*-
+
+"""Gold model signals."""
-from __future__ import absolute_import
from django.db.models.signals import pre_delete
from django.dispatch import receiver
@@ -11,6 +12,6 @@
@receiver(pre_delete, sender=GoldUser)
def delete_customer(sender, instance, **__):
- """On Gold subscription deletion, remove the customer from Stripe"""
+ """On Gold subscription deletion, remove the customer from Stripe."""
if sender == GoldUser and instance.stripe_id is not None:
utils.delete_customer(instance.stripe_id)
diff --git a/readthedocs/gold/templates/gold/projects.html b/readthedocs/gold/templates/gold/projects.html
index 682b7bf3c9f..e832b53ee67 100644
--- a/readthedocs/gold/templates/gold/projects.html
+++ b/readthedocs/gold/templates/gold/projects.html
@@ -48,4 +48,3 @@ {% trans "Add a project" %}
{% endblock %}
-
diff --git a/readthedocs/gold/tests/test_forms.py b/readthedocs/gold/tests/test_forms.py
index acf1c82fe1e..c379bba3b79 100644
--- a/readthedocs/gold/tests/test_forms.py
+++ b/readthedocs/gold/tests/test_forms.py
@@ -1,13 +1,13 @@
-from __future__ import absolute_import
-import mock
+# -*- coding: utf-8 -*-
import django_dynamic_fixture as fixture
-from django.test import TestCase
+import mock
from django.contrib.auth.models import User
+from django.test import TestCase
from readthedocs.projects.models import Project
-from ..models import GoldUser
from ..forms import GoldSubscriptionForm
+from ..models import GoldUser
class GoldSubscriptionFormTests(TestCase):
@@ -32,7 +32,7 @@ def mock_request(self, resp):
self.mocks['request'].request = mock.Mock(side_effect=resp)
def test_add_subscription(self):
- """Valid subscription form"""
+ """Valid subscription form."""
subscription_list = {
'object': 'list',
'data': [],
@@ -44,7 +44,7 @@ def test_add_subscription(self):
'id': 'cus_12345',
'description': self.user.get_full_name(),
'email': self.user.email,
- 'subscriptions': subscription_list
+ 'subscriptions': subscription_list,
}
subscription_obj = {
'id': 'sub_12345',
@@ -56,7 +56,7 @@ def test_add_subscription(self):
'amount': 1000,
'currency': 'usd',
'name': 'Test',
- }
+ },
}
self.mock_request([
(customer_obj, ''),
@@ -65,13 +65,14 @@ def test_add_subscription(self):
])
# Create user and subscription
- subscription_form = GoldSubscriptionForm({
- 'level': 'v1-org-5',
- 'last_4_card_digits': '0000',
- 'stripe_token': 'GARYBUSEY',
- 'business_vat_id': 'business-vat-id',
- },
- customer=self.user,
+ subscription_form = GoldSubscriptionForm(
+ {
+ 'level': 'v1-org-5',
+ 'last_4_card_digits': '0000',
+ 'stripe_token': 'GARYBUSEY',
+ 'business_vat_id': 'business-vat-id',
+ },
+ customer=self.user,
)
self.assertTrue(subscription_form.is_valid())
subscription = subscription_form.save()
@@ -83,22 +84,28 @@ def test_add_subscription(self):
self.assertEqual(self.user.gold.first().level, 'v1-org-5')
self.mocks['request'].request.assert_has_calls([
- mock.call('post',
- '/v1/customers',
- {'description': mock.ANY, 'email': mock.ANY, 'business_vat_id': 'business-vat-id'},
- mock.ANY),
- mock.call('get',
- '/v1/customers/cus_12345/subscriptions',
- mock.ANY,
- mock.ANY),
- mock.call('post',
- '/v1/customers/cus_12345/subscriptions',
- {'source': mock.ANY, 'plan': 'v1-org-5'},
- mock.ANY),
+ mock.call(
+ 'post',
+ '/v1/customers',
+ {'description': mock.ANY, 'email': mock.ANY, 'business_vat_id': 'business-vat-id'},
+ mock.ANY,
+ ),
+ mock.call(
+ 'get',
+ '/v1/customers/cus_12345/subscriptions',
+ mock.ANY,
+ mock.ANY,
+ ),
+ mock.call(
+ 'post',
+ '/v1/customers/cus_12345/subscriptions',
+ {'source': mock.ANY, 'plan': 'v1-org-5'},
+ mock.ANY,
+ ),
])
def test_add_subscription_update_user(self):
- """Valid subscription form"""
+ """Valid subscription form."""
subscription_list = {
'object': 'list',
'data': [],
@@ -110,7 +117,7 @@ def test_add_subscription_update_user(self):
'id': 'cus_12345',
'description': self.user.get_full_name(),
'email': self.user.email,
- 'subscriptions': subscription_list
+ 'subscriptions': subscription_list,
}
subscription_obj = {
'id': 'sub_12345',
@@ -122,7 +129,7 @@ def test_add_subscription_update_user(self):
'amount': 1000,
'currency': 'usd',
'name': 'Test',
- }
+ },
}
self.mock_request([
(customer_obj, ''),
@@ -134,11 +141,13 @@ def test_add_subscription_update_user(self):
# Create user and update the current gold subscription
golduser = fixture.get(GoldUser, user=self.user, stripe_id='cus_12345')
subscription_form = GoldSubscriptionForm(
- {'level': 'v1-org-5',
- 'last_4_card_digits': '0000',
- 'stripe_token': 'GARYBUSEY'},
+ {
+ 'level': 'v1-org-5',
+ 'last_4_card_digits': '0000',
+ 'stripe_token': 'GARYBUSEY',
+ },
customer=self.user,
- instance=golduser
+ instance=golduser,
)
self.assertTrue(subscription_form.is_valid())
subscription = subscription_form.save()
@@ -149,26 +158,34 @@ def test_add_subscription_update_user(self):
self.assertEqual(self.user.gold.first().level, 'v1-org-5')
self.mocks['request'].request.assert_has_calls([
- mock.call('get',
- '/v1/customers/cus_12345',
- {},
- mock.ANY),
- mock.call('post',
- '/v1/customers/cus_12345',
- {'description': mock.ANY, 'email': mock.ANY},
- mock.ANY),
- mock.call('get',
- '/v1/customers/cus_12345/subscriptions',
- mock.ANY,
- mock.ANY),
- mock.call('post',
- '/v1/customers/cus_12345/subscriptions',
- {'source': mock.ANY, 'plan': 'v1-org-5'},
- mock.ANY),
+ mock.call(
+ 'get',
+ '/v1/customers/cus_12345',
+ {},
+ mock.ANY,
+ ),
+ mock.call(
+ 'post',
+ '/v1/customers/cus_12345',
+ {'description': mock.ANY, 'email': mock.ANY},
+ mock.ANY,
+ ),
+ mock.call(
+ 'get',
+ '/v1/customers/cus_12345/subscriptions',
+ mock.ANY,
+ mock.ANY,
+ ),
+ mock.call(
+ 'post',
+ '/v1/customers/cus_12345/subscriptions',
+ {'source': mock.ANY, 'plan': 'v1-org-5'},
+ mock.ANY,
+ ),
])
def test_update_subscription_plan(self):
- """Update subcription plan"""
+        """Update subscription plan."""
subscription_obj = {
'id': 'sub_12345',
'object': 'subscription',
@@ -179,7 +196,7 @@ def test_update_subscription_plan(self):
'amount': 1000,
'currency': 'usd',
'name': 'Test',
- }
+ },
}
subscription_list = {
'object': 'list',
@@ -192,7 +209,7 @@ def test_update_subscription_plan(self):
'id': 'cus_12345',
'description': self.user.get_full_name(),
'email': self.user.email,
- 'subscriptions': subscription_list
+ 'subscriptions': subscription_list,
}
self.mock_request([
(customer_obj, ''),
@@ -200,10 +217,12 @@ def test_update_subscription_plan(self):
(subscription_obj, ''),
])
subscription_form = GoldSubscriptionForm(
- {'level': 'v1-org-5',
- 'last_4_card_digits': '0000',
- 'stripe_token': 'GARYBUSEY'},
- customer=self.user
+ {
+ 'level': 'v1-org-5',
+ 'last_4_card_digits': '0000',
+ 'stripe_token': 'GARYBUSEY',
+ },
+ customer=self.user,
)
self.assertTrue(subscription_form.is_valid())
subscription = subscription_form.save()
@@ -213,16 +232,22 @@ def test_update_subscription_plan(self):
self.assertEqual(self.user.gold.first().level, 'v1-org-5')
self.mocks['request'].request.assert_has_calls([
- mock.call('post',
- '/v1/customers',
- {'description': mock.ANY, 'email': mock.ANY},
- mock.ANY),
- mock.call('get',
- '/v1/customers/cus_12345/subscriptions',
- mock.ANY,
- mock.ANY),
- mock.call('post',
- '/v1/subscriptions/sub_12345',
- {'source': mock.ANY, 'plan': 'v1-org-5'},
- mock.ANY),
+ mock.call(
+ 'post',
+ '/v1/customers',
+ {'description': mock.ANY, 'email': mock.ANY},
+ mock.ANY,
+ ),
+ mock.call(
+ 'get',
+ '/v1/customers/cus_12345/subscriptions',
+ mock.ANY,
+ mock.ANY,
+ ),
+ mock.call(
+ 'post',
+ '/v1/subscriptions/sub_12345',
+ {'source': mock.ANY, 'plan': 'v1-org-5'},
+ mock.ANY,
+ ),
])
diff --git a/readthedocs/gold/tests/test_signals.py b/readthedocs/gold/tests/test_signals.py
index 62618581c06..f55e0b53be8 100644
--- a/readthedocs/gold/tests/test_signals.py
+++ b/readthedocs/gold/tests/test_signals.py
@@ -1,14 +1,10 @@
-from __future__ import absolute_import
-import mock
+# -*- coding: utf-8 -*-
import django_dynamic_fixture as fixture
-from django.test import TestCase
+import mock
from django.contrib.auth.models import User
-from django.db.models.signals import pre_delete
-
-from readthedocs.projects.models import Project
+from django.test import TestCase
from ..models import GoldUser
-from ..signals import delete_customer
class GoldSignalTests(TestCase):
diff --git a/readthedocs/gold/urls.py b/readthedocs/gold/urls.py
index dd8b6e354a4..295e5250d10 100644
--- a/readthedocs/gold/urls.py
+++ b/readthedocs/gold/urls.py
@@ -1,6 +1,7 @@
-"""Gold subscription URLs"""
+# -*- coding: utf-8 -*-
+
+"""Gold subscription URLs."""
-from __future__ import absolute_import
from django.conf.urls import url
from readthedocs.gold import views
@@ -9,12 +10,24 @@
urlpatterns = [
url(r'^$', views.DetailGoldSubscription.as_view(), name='gold_detail'),
- url(r'^subscription/$', views.UpdateGoldSubscription.as_view(),
- name='gold_subscription'),
- url(r'^cancel/$', views.DeleteGoldSubscription.as_view(), name='gold_cancel'),
+ url(
+ r'^subscription/$',
+ views.UpdateGoldSubscription.as_view(),
+ name='gold_subscription',
+ ),
+ url(
+ r'^cancel/$',
+ views.DeleteGoldSubscription.as_view(),
+ name='gold_cancel',
+ ),
url(r'^projects/$', views.projects, name='gold_projects'),
-    url((r'^projects/remove/(?P<project_slug>{project_slug})/$'
- .format(project_slug=PROJECT_SLUG_REGEX)),
+ url(
+ (
+            r'^projects/remove/(?P<project_slug>{project_slug})/$'.format(
+ project_slug=PROJECT_SLUG_REGEX,
+ )
+ ),
views.projects_remove,
- name='gold_projects_remove'),
+ name='gold_projects_remove',
+ ),
]
diff --git a/readthedocs/gold/views.py b/readthedocs/gold/views.py
index a0122d8286c..d5f0d6121c5 100644
--- a/readthedocs/gold/views.py
+++ b/readthedocs/gold/views.py
@@ -1,20 +1,14 @@
# -*- coding: utf-8 -*-
-"""Gold subscription views."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals
-)
+"""Gold subscription views."""
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.messages.views import SuccessMessageMixin
-from django.urls import reverse, reverse_lazy
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
+from django.urls import reverse, reverse_lazy
from django.utils.translation import ugettext_lazy as _
from vanilla import DeleteView, DetailView, UpdateView
@@ -26,8 +20,11 @@
from .models import GoldUser
-class GoldSubscriptionMixin(SuccessMessageMixin, StripeMixin,
- LoginRequiredMixin):
+class GoldSubscriptionMixin(
+ SuccessMessageMixin,
+ StripeMixin,
+ LoginRequiredMixin,
+):
"""Gold subscription mixin for view classes."""
@@ -43,16 +40,16 @@ def get_object(self):
def get_form(self, data=None, files=None, **kwargs):
"""Pass in copy of POST data to avoid read only QueryDicts."""
kwargs['customer'] = self.request.user
- return super(GoldSubscriptionMixin, self).get_form(data, files, **kwargs)
+ return super().get_form(data, files, **kwargs)
def get_success_url(self, **__):
return reverse_lazy('gold_detail')
def get_template_names(self):
- return ('gold/subscription{0}.html'.format(self.template_name_suffix))
+ return ('gold/subscription{}.html'.format(self.template_name_suffix))
def get_context_data(self, **kwargs):
- context = super(GoldSubscriptionMixin, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
domains = Domain.objects.filter(project__users=self.request.user)
context['domains'] = domains
return context
@@ -70,7 +67,7 @@ def get(self, request, *args, **kwargs):
If there is a gold subscription instance, then we show the normal detail
page, otherwise show the registration form
"""
- resp = super(DetailGoldSubscription, self).get(request, *args, **kwargs)
+ resp = super().get(request, *args, **kwargs)
if self.object is None:
return HttpResponseRedirect(reverse('gold_subscription'))
return resp
@@ -94,7 +91,7 @@ class DeleteGoldSubscription(GoldSubscriptionMixin, DeleteView):
def post(self, request, *args, **kwargs):
"""Add success message to delete post."""
- resp = super(DeleteGoldSubscription, self).post(request, *args, **kwargs)
+ resp = super().post(request, *args, **kwargs)
success_message = self.get_success_message({})
if success_message:
messages.success(self.request, success_message)
@@ -108,7 +105,11 @@ def projects(request):
if request.method == 'POST':
form = GoldProjectForm(
- active_user=request.user, data=request.POST, user=gold_user, projects=gold_projects)
+ active_user=request.user,
+ data=request.POST,
+ user=gold_user,
+ projects=gold_projects,
+ )
if form.is_valid():
to_add = Project.objects.get(slug=form.cleaned_data['project'])
gold_user.projects.add(to_add)
@@ -121,13 +122,16 @@ def projects(request):
form = GoldProjectForm(active_user=request.user)
return render(
- request, 'gold/projects.html', {
+ request,
+ 'gold/projects.html',
+ {
'form': form,
'gold_user': gold_user,
'publishable': settings.STRIPE_PUBLISHABLE,
'user': request.user,
'projects': gold_projects,
- })
+ },
+ )
@login_required
diff --git a/readthedocs/integrations/admin.py b/readthedocs/integrations/admin.py
index cbeabea02e6..1b27ed8f5c8 100644
--- a/readthedocs/integrations/admin.py
+++ b/readthedocs/integrations/admin.py
@@ -1,12 +1,13 @@
+# -*- coding: utf-8 -*-
+
"""Integration admin models."""
-from __future__ import absolute_import
-from django.contrib import admin
from django import urls
+from django.contrib import admin
from django.utils.safestring import mark_safe
from pygments.formatters import HtmlFormatter
-from .models import Integration, HttpExchange
+from .models import HttpExchange, Integration
def pretty_json_field(field, description, include_styles=False):
@@ -18,11 +19,13 @@ def inner(_, obj):
if include_styles:
formatter = HtmlFormatter(style='colorful')
styles = '<style>' + formatter.get_style_defs() + '</style>'
- return mark_safe('<div style="{0}">{1}</div>{2}'.format(
- 'float: left;',
- obj.formatted_json(field),
- styles,
- ))
+ return mark_safe(
+ '<div style="{}">{}</div>{}'.format(
+ 'float: left;',
+ obj.formatted_json(field),
+ styles,
+ ),
+ )
inner.short_description = description
return inner
@@ -96,16 +99,20 @@ def exchanges(self, obj):
JSONField doesn't do well with fieldsets for whatever reason. This is
just to link to the exchanges.
"""
- url = urls.reverse('admin:{0}_{1}_changelist'.format(
- HttpExchange._meta.app_label, # pylint: disable=protected-access
- HttpExchange._meta.model_name, # pylint: disable=protected-access
- ))
- return mark_safe('<a href="{0}?{1}__pk={2}">{3} HTTP transactions</a>'.format(
- url,
- 'integrations',
- obj.pk,
- obj.exchanges.count(),
- ))
+ url = urls.reverse(
+ 'admin:{}_{}_changelist'.format(
+ HttpExchange._meta.app_label, # pylint: disable=protected-access
+ HttpExchange._meta.model_name, # pylint: disable=protected-access
+ ),
+ )
+ return mark_safe(
+ '<a href="{}?{}__pk={}">{} HTTP transactions</a>'.format(
+ url,
+ 'integrations',
+ obj.pk,
+ obj.exchanges.count(),
+ ),
+ )
exchanges.short_description = 'HTTP exchanges'
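A rough standalone sketch of the idea behind exchanges() above: reverse the admin changelist URL from a model's metadata and append a relation filter. The fakes below replace Django so the snippet runs on its own:

    def changelist_url(model_cls, related_name, pk, reverse_fn):
        # reverse_fn stands in for django.urls.reverse.
        url = reverse_fn(
            'admin:{}_{}_changelist'.format(
                model_cls._meta.app_label,
                model_cls._meta.model_name,
            ),
        )
        # Filter the changelist down to rows related to ``pk``.
        return '{}?{}__pk={}'.format(url, related_name, pk)

    class _Meta:
        app_label = 'integrations'
        model_name = 'httpexchange'

    class FakeExchange:
        _meta = _Meta()

    url = changelist_url(
        FakeExchange, 'integrations', 42,
        lambda name: '/admin/integrations/httpexchange/',
    )
    assert url == '/admin/integrations/httpexchange/?integrations__pk=42'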
diff --git a/readthedocs/integrations/migrations/0001_add_http_exchange.py b/readthedocs/integrations/migrations/0001_add_http_exchange.py
index b4440b6eab5..c1ee6c0b714 100644
--- a/readthedocs/integrations/migrations/0001_add_http_exchange.py
+++ b/readthedocs/integrations/migrations/0001_add_http_exchange.py
@@ -1,12 +1,10 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-03-16 18:30
-from __future__ import unicode_literals
+import uuid
-from __future__ import absolute_import
-from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
-import uuid
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/integrations/migrations/0002_add-webhook.py b/readthedocs/integrations/migrations/0002_add-webhook.py
index 3d061993c7a..e42ef2a8613 100644
--- a/readthedocs/integrations/migrations/0002_add-webhook.py
+++ b/readthedocs/integrations/migrations/0002_add-webhook.py
@@ -1,11 +1,8 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-03-29 21:29
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/integrations/migrations/0003_add_missing_model_change_migrations.py b/readthedocs/integrations/migrations/0003_add_missing_model_change_migrations.py
index a1356c48fa2..d1a4314417d 100644
--- a/readthedocs/integrations/migrations/0003_add_missing_model_change_migrations.py
+++ b/readthedocs/integrations/migrations/0003_add_missing_model_change_migrations.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-31 11:25
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/integrations/models.py b/readthedocs/integrations/models.py
index 7514699cef6..c562c372983 100644
--- a/readthedocs/integrations/models.py
+++ b/readthedocs/integrations/models.py
@@ -2,18 +2,10 @@
"""Integration models for external services."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import json
import re
import uuid
-from builtins import object, str
from django.contrib.contenttypes.fields import (
GenericForeignKey,
GenericRelation,
@@ -73,11 +65,11 @@ def from_exchange(self, req, resp, related_object, payload=None):
# headers. HTTP headers are prefixed with `HTTP_`, which we remove,
# and because the keys are all uppercase, we'll normalize them to
# title case-y hyphen separated values.
- request_headers = dict(
- (key[5:].title().replace('_', '-'), str(val))
+ request_headers = {
+ key[5:].title().replace('_', '-'): str(val)
for (key, val) in list(req.META.items())
- if key.startswith('HTTP_'),
- ) # yapf: disable
+ if key.startswith('HTTP_')
+ } # yapf: disable
request_headers['Content-Type'] = req.content_type
# Remove unwanted headers
@@ -146,7 +138,7 @@ class HttpExchange(models.Model):
objects = HttpExchangeManager()
- class Meta(object):
+ class Meta:
ordering = ['-date']
def __str__(self):
@@ -191,11 +183,11 @@ class IntegrationQuerySet(models.QuerySet):
def _get_subclass(self, integration_type):
# Build a mapping of integration_type -> class dynamically
- class_map = dict(
- (cls.integration_type_id, cls)
+ class_map = {
+ cls.integration_type_id: cls
for cls in self.model.__subclasses__()
- if hasattr(cls, 'integration_type_id'),
- ) # yapf: disable
+ if hasattr(cls, 'integration_type_id')
+ } # yapf: disable
return class_map.get(integration_type)
def _get_subclass_replacement(self, original):
@@ -215,7 +207,7 @@ def _get_subclass_replacement(self, original):
return new
def get(self, *args, **kwargs):
- original = super(IntegrationQuerySet, self).get(*args, **kwargs)
+ original = super().get(*args, **kwargs)
return self._get_subclass_replacement(original)
def subclass(self, instance):
@@ -277,7 +269,8 @@ class Integration(models.Model):
def __str__(self):
return (
_('{0} for {1}')
- .format(self.get_integration_type_display(), self.project.name))
+ .format(self.get_integration_type_display(), self.project.name)
+ )
class GitHubWebhook(Integration):
@@ -285,7 +278,7 @@ class GitHubWebhook(Integration):
integration_type_id = Integration.GITHUB_WEBHOOK
has_sync = True
- class Meta(object):
+ class Meta:
proxy = True
@property
@@ -301,7 +294,7 @@ class BitbucketWebhook(Integration):
integration_type_id = Integration.BITBUCKET_WEBHOOK
has_sync = True
- class Meta(object):
+ class Meta:
proxy = True
@property
@@ -317,7 +310,7 @@ class GitLabWebhook(Integration):
integration_type_id = Integration.GITLAB_WEBHOOK
has_sync = True
- class Meta(object):
+ class Meta:
proxy = True
@property
@@ -333,7 +326,7 @@ class GenericAPIWebhook(Integration):
integration_type_id = Integration.API_WEBHOOK
has_sync = False
- class Meta(object):
+ class Meta:
proxy = True
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
@@ -346,7 +339,7 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ
if token is None:
token = default_token()
self.provider_data = {'token': token}
- super(GenericAPIWebhook, self).save(*args, **kwargs)
+ super().save(*args, **kwargs)
@property
def token(self):
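A self-contained sketch of the header-normalisation rewrite in from_exchange() above, converting WSGI-style HTTP_* keys into title-cased, hyphen-separated header names with a dict comprehension:

    def normalize_headers(meta):
        # WSGI prefixes HTTP headers with ``HTTP_`` and upper-cases them;
        # strip the prefix and turn FOO_BAR into Foo-Bar.
        return {
            key[5:].title().replace('_', '-'): str(val)
            for (key, val) in meta.items()
            if key.startswith('HTTP_')
        }

    meta = {
        'HTTP_USER_AGENT': 'curl/7.64',
        'HTTP_X_FORWARDED_FOR': '10.0.0.1',
        'SERVER_NAME': 'ignored',
    }
    assert normalize_headers(meta) == {
        'User-Agent': 'curl/7.64',
        'X-Forwarded-For': '10.0.0.1',
    }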
diff --git a/readthedocs/integrations/utils.py b/readthedocs/integrations/utils.py
index 978da9c8504..0939a36a855 100644
--- a/readthedocs/integrations/utils.py
+++ b/readthedocs/integrations/utils.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""Integration utility functions."""
diff --git a/readthedocs/notifications/__init__.py b/readthedocs/notifications/__init__.py
index c1860cbc8d1..2802d286a1e 100644
--- a/readthedocs/notifications/__init__.py
+++ b/readthedocs/notifications/__init__.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Extensions to Django messages to support notifications to users.
@@ -18,8 +20,7 @@
__all__ = (
'Notification',
'SiteNotification',
- 'send_notification'
+ 'send_notification',
)
-
default_app_config = 'readthedocs.notifications.apps.NotificationsAppConfig'
diff --git a/readthedocs/notifications/apps.py b/readthedocs/notifications/apps.py
index 38ed93cda31..60543374e05 100644
--- a/readthedocs/notifications/apps.py
+++ b/readthedocs/notifications/apps.py
@@ -1,5 +1,6 @@
+# -*- coding: utf-8 -*-
+
"""Django app configuration for the notifications app."""
-from __future__ import absolute_import
from django.apps import AppConfig
diff --git a/readthedocs/notifications/backends.py b/readthedocs/notifications/backends.py
index 6cc794ddbbf..909248f60ca 100644
--- a/readthedocs/notifications/backends.py
+++ b/readthedocs/notifications/backends.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+
"""
Pluggable backends for the delivery of notifications.
@@ -7,10 +8,6 @@
displayed on the site.
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
-from builtins import object
from django.conf import settings
from django.http import HttpRequest
from django.utils.module_loading import import_string
@@ -35,7 +32,7 @@ def send_notification(request, notification):
backend.send(notification)
-class Backend(object):
+class Backend:
def __init__(self, request):
self.request = request
diff --git a/readthedocs/notifications/constants.py b/readthedocs/notifications/constants.py
index d15efa98448..640170aff40 100644
--- a/readthedocs/notifications/constants.py
+++ b/readthedocs/notifications/constants.py
@@ -1,6 +1,7 @@
-"""Notification constants"""
+# -*- coding: utf-8 -*-
+
+"""Notification constants."""
-from __future__ import absolute_import
from messages_extends import constants as message_constants
@@ -19,7 +20,6 @@
ERROR: message_constants.ERROR_PERSISTENT,
}
-
# Message levels to save the message into the database and mark as read
# immediately after retrieved (one-time shown message)
DEBUG_NON_PERSISTENT = 100
diff --git a/readthedocs/notifications/forms.py b/readthedocs/notifications/forms.py
index b65c1c15e76..a5ed64e966d 100644
--- a/readthedocs/notifications/forms.py
+++ b/readthedocs/notifications/forms.py
@@ -1,5 +1,6 @@
+# -*- coding: utf-8 -*-
+
"""HTML forms for sending notifications."""
-from __future__ import absolute_import
from django import forms
from django.utils.translation import ugettext_lazy as _
@@ -29,12 +30,14 @@ class SendNotificationForm(forms.Form):
def __init__(self, *args, **kwargs):
self.notification_classes = kwargs.pop('notification_classes', [])
- super(SendNotificationForm, self).__init__(*args, **kwargs)
- self.fields['source'].choices = [(cls.name, cls.name) for cls
- in self.notification_classes]
+ super().__init__(*args, **kwargs)
+ self.fields['source'].choices = [
+ (cls.name, cls.name)
+ for cls in self.notification_classes
+ ]
def clean_source(self):
"""Get the source class from the class name."""
source = self.cleaned_data['source']
- classes = dict((cls.name, cls) for cls in self.notification_classes)
+ classes = {cls.name: cls for cls in self.notification_classes}
return classes.get(source, None)
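A minimal sketch (with hypothetical notification classes) of the two idioms used in SendNotificationForm above: deriving choices from a list of classes, and resolving a submitted name back to its class with a dict comprehension:

    class BuildFailed:
        name = 'build_failed'

    class DeployFinished:
        name = 'deploy_finished'

    notification_classes = [BuildFailed, DeployFinished]

    # (value, label) pairs as expected by a Django ChoiceField.
    choices = [(cls.name, cls.name) for cls in notification_classes]

    # Reverse lookup used when cleaning the submitted value.
    classes = {cls.name: cls for cls in notification_classes}
    assert classes.get('build_failed') is BuildFailed
    assert classes.get('unknown') is None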
diff --git a/readthedocs/notifications/notification.py b/readthedocs/notifications/notification.py
index c4d1ca4de77..d6532941f8e 100644
--- a/readthedocs/notifications/notification.py
+++ b/readthedocs/notifications/notification.py
@@ -1,23 +1,23 @@
# -*- coding: utf-8 -*-
+
"""Support for templating of notifications."""
-from __future__ import absolute_import
-from builtins import object
import logging
+
from django.conf import settings
-from django.template import Template, Context
-from django.template.loader import render_to_string
from django.db import models
from django.http import HttpRequest
+from django.template import Context, Template
+from django.template.loader import render_to_string
-from .backends import send_notification
from . import constants
+from .backends import send_notification
log = logging.getLogger(__name__)
-class Notification(object):
+class Notification:
"""
An unsent notification linked to an object.
@@ -53,7 +53,8 @@ def get_context_data(self):
self.context_object_name: self.object,
'request': self.request,
'production_uri': '{scheme}://{host}'.format(
- scheme='https', host=settings.PRODUCTION_DOMAIN,
+ scheme='https',
+ host=settings.PRODUCTION_DOMAIN,
),
}
@@ -62,13 +63,13 @@ def get_template_names(self, backend_name, source_format=constants.HTML):
if self.object and isinstance(self.object, models.Model):
meta = self.object._meta # pylint: disable=protected-access
names.append(
- '{app}/notifications/{name}_{backend}.{source_format}'
- .format(
+ '{app}/notifications/{name}_{backend}.{source_format}'.format(
app=meta.app_label,
name=self.name or meta.model_name,
backend=backend_name,
source_format=source_format,
- ))
+ ),
+ )
return names
raise AttributeError()
@@ -122,8 +123,14 @@ class SiteNotification(Notification):
failure_level = constants.ERROR_NON_PERSISTENT
def __init__(
- self, user, success, reason=None, context_object=None,
- request=None, extra_context=None):
+ self,
+ user,
+ success,
+ reason=None,
+ context_object=None,
+ request=None,
+ extra_context=None,
+ ):
self.object = context_object
self.user = user or request.user
@@ -135,10 +142,10 @@ def __init__(
self.success = success
self.reason = reason
self.extra_context = extra_context or {}
- super(SiteNotification, self).__init__(context_object, request, user)
+ super().__init__(context_object, request, user)
def get_context_data(self):
- context = super(SiteNotification, self).get_context_data()
+ context = super().get_context_data()
context.update(self.extra_context)
return context
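An illustrative sketch of the template-name lookup reformatted in get_template_names() above, with placeholder app and model names:

    def notification_template_names(app_label, model_name, backend_name,
                                    source_format='html', name=None):
        # Mirrors the '{app}/notifications/{name}_{backend}.{source_format}'
        # convention; ``name`` falls back to the model name when unset.
        return [
            '{app}/notifications/{name}_{backend}.{source_format}'.format(
                app=app_label,
                name=name or model_name,
                backend=backend_name,
                source_format=source_format,
            ),
        ]

    assert notification_template_names('projects', 'project', 'email') == [
        'projects/notifications/project_email.html',
    ]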
diff --git a/readthedocs/notifications/storages.py b/readthedocs/notifications/storages.py
index 1755db952e0..31ee884d640 100644
--- a/readthedocs/notifications/storages.py
+++ b/readthedocs/notifications/storages.py
@@ -1,21 +1,22 @@
# -*- coding: utf-8 -*-
+
"""Customised storage for notifications."""
-from __future__ import absolute_import
from django.contrib.messages.storage.base import Message
from django.db.models import Q
from django.utils.safestring import mark_safe
-from messages_extends.storages import FallbackStorage, PersistentStorage
-from messages_extends.models import Message as PersistentMessage
from messages_extends.constants import PERSISTENT_MESSAGE_LEVELS
+from messages_extends.models import Message as PersistentMessage
+from messages_extends.storages import FallbackStorage, PersistentStorage
+
+from .constants import NON_PERSISTENT_MESSAGE_LEVELS
+
try:
from django.utils import timezone
except ImportError:
from datetime import datetime as timezone
-from .constants import NON_PERSISTENT_MESSAGE_LEVELS
-
class FallbackUniqueStorage(FallbackStorage):
@@ -49,8 +50,7 @@ class FallbackUniqueStorage(FallbackStorage):
def _get(self, *args, **kwargs):
# The database backend for persistent messages doesn't support setting
# messages with ``mark_safe``, therefore, we need to do it broadly here.
- messages, all_ret = (super(FallbackUniqueStorage, self)
- ._get(self, *args, **kwargs))
+ messages, all_ret = (super()._get(self, *args, **kwargs))
safe_messages = []
for message in messages:
@@ -59,9 +59,11 @@ def _get(self, *args, **kwargs):
# process ephemeral messages
if message.level in PERSISTENT_MESSAGE_LEVELS + NON_PERSISTENT_MESSAGE_LEVELS:
message_pk = message.pk
- message = Message(message.level,
- mark_safe(message.message),
- message.extra_tags)
+ message = Message(
+ message.level,
+ mark_safe(message.message),
+ message.extra_tags,
+ )
message.pk = message_pk
safe_messages.append(message)
return safe_messages, all_ret
@@ -69,14 +71,16 @@ def _get(self, *args, **kwargs):
def add(self, level, message, extra_tags='', *args, **kwargs): # noqa
user = kwargs.get('user') or self.request.user
if not user.is_anonymous:
- persist_messages = (PersistentMessage.objects
- .filter(message=message,
- user=user,
- read=False))
+ persist_messages = (
+ PersistentMessage.objects.filter(
+ message=message,
+ user=user,
+ read=False,
+ )
+ )
if persist_messages.exists():
return
- super(FallbackUniqueStorage, self).add(level, message, extra_tags,
- *args, **kwargs)
+ super().add(level, message, extra_tags, *args, **kwargs)
class NonPersistentStorage(PersistentStorage):
@@ -133,7 +137,7 @@ def process_message(self, message, *args, **kwargs):
if message.level not in NON_PERSISTENT_MESSAGE_LEVELS:
return message
- user = kwargs.get("user") or self.get_user()
+ user = kwargs.get('user') or self.get_user()
try:
anonymous = user.is_anonymous
@@ -141,14 +145,15 @@ def process_message(self, message, *args, **kwargs):
anonymous = user.is_anonymous
if anonymous:
raise NotImplementedError(
- 'Persistent message levels cannot be used for anonymous users.')
+ 'Persistent message levels cannot be used for anonymous users.',
+ )
message_persistent = PersistentMessage()
message_persistent.level = message.level
message_persistent.message = message.message
message_persistent.extra_tags = message.extra_tags
message_persistent.user = user
- if "expires" in kwargs:
- message_persistent.expires = kwargs["expires"]
+ if 'expires' in kwargs:
+ message_persistent.expires = kwargs['expires']
message_persistent.save()
return None
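A rough sketch of the de-duplication rule in FallbackUniqueStorage.add() above, stripped of the Django and messages_extends machinery; the in-memory store below is a stand-in, not the real persistent backend:

    class UniqueMessageStore:
        """Keep at most one unread copy of a message per user."""

        def __init__(self):
            self.messages = []

        def add(self, user, message):
            unread_duplicates = [
                m for m in self.messages
                if m['user'] == user and m['message'] == message and not m['read']
            ]
            if unread_duplicates:
                return  # mirrors the early ``return`` in the storage above
            self.messages.append({'user': user, 'message': message, 'read': False})

    store = UniqueMessageStore()
    store.add('alice', 'Your build failed')
    store.add('alice', 'Your build failed')  # ignored: already queued unread
    assert len(store.messages) == 1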
diff --git a/readthedocs/notifications/urls.py b/readthedocs/notifications/urls.py
index 349c1f293aa..91803e3efef 100644
--- a/readthedocs/notifications/urls.py
+++ b/readthedocs/notifications/urls.py
@@ -1,14 +1,20 @@
-"""Renames for messages_extends URLs"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from django.conf.urls import url
+"""Renames for messages_extends URLs."""
-from messages_extends.views import message_mark_read, message_mark_all_read
+from django.conf.urls import url
+from messages_extends.views import message_mark_all_read, message_mark_read
urlpatterns = [
- url(r'^dismiss/(?P<message_id>\d+)/$', message_mark_read,
- name='message_mark_read'),
- url(r'^dismiss/all/$', message_mark_all_read,
- name='message_mark_all_read'),
+ url(
+ r'^dismiss/(?P<message_id>\d+)/$',
+ message_mark_read,
+ name='message_mark_read',
+ ),
+ url(
+ r'^dismiss/all/$',
+ message_mark_all_read,
+ name='message_mark_all_read',
+ ),
]
diff --git a/readthedocs/notifications/views.py b/readthedocs/notifications/views.py
index 0ba04fae0c9..0ee0495ddb8 100644
--- a/readthedocs/notifications/views.py
+++ b/readthedocs/notifications/views.py
@@ -1,8 +1,9 @@
+# -*- coding: utf-8 -*-
+
"""Django views for the notifications app."""
-from __future__ import absolute_import
-from django.views.generic import FormView
from django.contrib import admin, messages
from django.http import HttpResponseRedirect
+from django.views.generic import FormView
from .forms import SendNotificationForm
@@ -14,13 +15,11 @@ class SendNotificationView(FormView):
Accepts the following additional parameters:
- queryset
- The queryset to use to determine the users to send emails to
-
- :cvar action_name: Name of the action to pass to the form template,
- determines the action to pass back to the admin view
- :cvar notification_classes: List of :py:class:`Notification` classes to
- display in the form
+ :param queryset: Queryset to use to determine the users to send emails to
+ :param action_name: Name of the action to pass to the form template,
+ determines the action to pass back to the admin view
+ :param notification_classes: List of :py:class:`Notification` classes to
+ display in the form
"""
form_class = SendNotificationForm
@@ -35,7 +34,7 @@ def get_form_kwargs(self):
The admin posts to this view initially, so detect the send button on
form post variables. Drop additional fields if we see the send button.
"""
- kwargs = super(SendNotificationView, self).get_form_kwargs()
+ kwargs = super().get_form_kwargs()
kwargs['notification_classes'] = self.notification_classes
if 'send' not in self.request.POST:
kwargs.pop('data', None)
@@ -44,9 +43,10 @@ def get_form_kwargs(self):
def get_initial(self):
"""Add selected ids to initial form data."""
- initial = super(SendNotificationView, self).get_initial()
+ initial = super().get_initial()
initial['_selected_action'] = self.request.POST.getlist(
- admin.ACTION_CHECKBOX_NAME)
+ admin.ACTION_CHECKBOX_NAME,
+ )
return initial
def form_valid(self, form):
@@ -55,15 +55,17 @@ def form_valid(self, form):
notification_cls = form.cleaned_data['source']
for obj in self.get_queryset().all():
for recipient in self.get_object_recipients(obj):
- notification = notification_cls(context_object=obj,
- request=self.request,
- user=recipient)
+ notification = notification_cls(
+ context_object=obj,
+ request=self.request,
+ user=recipient,
+ )
notification.send()
count += 1
if count == 0:
- self.message_user("No recipients to send to", level=messages.ERROR)
+ self.message_user('No recipients to send to', level=messages.ERROR)
else:
- self.message_user("Queued {0} messages".format(count))
+ self.message_user('Queued {} messages'.format(count))
return HttpResponseRedirect(self.request.get_full_path())
def get_object_recipients(self, obj):
@@ -89,7 +91,7 @@ def get_queryset(self):
def get_context_data(self, **kwargs):
"""Return queryset in context."""
- context = super(SendNotificationView, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
recipients = []
for obj in self.get_queryset().all():
recipients.extend(self.get_object_recipients(obj))
@@ -97,14 +99,26 @@ def get_context_data(self, **kwargs):
context['action_name'] = self.action_name
return context
- def message_user(self, message, level=messages.INFO, extra_tags='',
- fail_silently=False):
+ def message_user(
+ self,
+ message,
+ level=messages.INFO,
+ extra_tags='',
+ fail_silently=False,
+ ):
"""
- Implementation of :py:meth:`django.contrib.admin.options.ModelAdmin.message_user`
+ Implementation of
+ :py:meth:`django.contrib.admin.options.ModelAdmin.message_user`.
+
Send message through messages framework
"""
# TODO generalize this or check if implementation in ModelAdmin is
# usable here
- messages.add_message(self.request, level, message, extra_tags=extra_tags,
- fail_silently=fail_silently)
+ messages.add_message(
+ self.request,
+ level,
+ message,
+ extra_tags=extra_tags,
+ fail_silently=fail_silently,
+ )
diff --git a/readthedocs/oauth/__init__.py b/readthedocs/oauth/__init__.py
index 510c93a3526..32bb8a9a4a5 100644
--- a/readthedocs/oauth/__init__.py
+++ b/readthedocs/oauth/__init__.py
@@ -1 +1,2 @@
+# -*- coding: utf-8 -*-
default_app_config = 'readthedocs.oauth.apps.OAuthConfig'
diff --git a/readthedocs/oauth/admin.py b/readthedocs/oauth/admin.py
index b961e6472c5..eeee459e6ec 100644
--- a/readthedocs/oauth/admin.py
+++ b/readthedocs/oauth/admin.py
@@ -1,9 +1,10 @@
+# -*- coding: utf-8 -*-
+
"""Admin configuration for the OAuth app."""
-from __future__ import absolute_import
from django.contrib import admin
-from .models import RemoteRepository, RemoteOrganization
+from .models import RemoteOrganization, RemoteRepository
class RemoteRepositoryAdmin(admin.ModelAdmin):
diff --git a/readthedocs/oauth/apps.py b/readthedocs/oauth/apps.py
index b8998b8f458..1486873907b 100644
--- a/readthedocs/oauth/apps.py
+++ b/readthedocs/oauth/apps.py
@@ -1,4 +1,6 @@
-"""OAuth app config"""
+# -*- coding: utf-8 -*-
+
+"""OAuth app config."""
from django.apps import AppConfig
diff --git a/readthedocs/oauth/migrations/0001_initial.py b/readthedocs/oauth/migrations/0001_initial.py
index 352af573c87..3b3c4b1547b 100644
--- a/readthedocs/oauth/migrations/0001_initial.py
+++ b/readthedocs/oauth/migrations/0001_initial.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/oauth/migrations/0002_combine_services.py b/readthedocs/oauth/migrations/0002_combine_services.py
index ab053be97f0..1290dc5d95e 100644
--- a/readthedocs/oauth/migrations/0002_combine_services.py
+++ b/readthedocs/oauth/migrations/0002_combine_services.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
-from django.conf import settings
import django.core.validators
+from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/oauth/migrations/0003_move_github.py b/readthedocs/oauth/migrations/0003_move_github.py
index 40fd4c562e0..2b4837f4e1a 100644
--- a/readthedocs/oauth/migrations/0003_move_github.py
+++ b/readthedocs/oauth/migrations/0003_move_github.py
@@ -1,12 +1,10 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-import json
import gc
+import json
import logging
-from django.db import models, migrations
+from django.db import migrations
+
log = logging.getLogger(__name__)
@@ -25,7 +23,7 @@ def chunks(queryset, chunksize=1000):
def forwards_move_repos(apps, schema_editor):
- """Moves OAuth repos"""
+ """Moves OAuth repos."""
db = schema_editor.connection.alias
# Organizations
@@ -109,7 +107,7 @@ def forwards_move_repos(apps, schema_editor):
else:
new_repo.clone_url = data.get('clone_url')
new_repo.json = json.dumps(data)
- except (SyntaxError, ValueError) as e:
+ except (SyntaxError, ValueError):
pass
new_repo.save()
log.info('Migrated project: %s', project.name)
@@ -143,21 +141,21 @@ def forwards_move_repos(apps, schema_editor):
new_repo.private = data.get('is_private', False)
new_repo.json = json.dumps(data)
- clone_urls = dict((location['name'], location['href'])
+ clone_urls = {location['name']: location['href']
for location
- in data.get('links', {}).get('clone', {}))
+ in data.get('links', {}).get('clone', {})}
if new_repo.private:
new_repo.clone_url = clone_urls.get('ssh', project.git_url)
else:
new_repo.clone_url = clone_urls.get('https', project.html_url)
- except (SyntaxError, ValueError) as e:
+ except (SyntaxError, ValueError):
pass
new_repo.save()
log.info('Migrated project: %s', project.name)
def reverse_move_repos(apps, schema_editor):
- """Drop OAuth repos"""
+ """Drop OAuth repos."""
db = schema_editor.connection.alias
RemoteRepository = apps.get_model('oauth', 'RemoteRepository')
RemoteOrganization = apps.get_model('oauth', 'RemoteOrganization')
diff --git a/readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py b/readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py
index 628891ff795..5b00b8377e5 100644
--- a/readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py
+++ b/readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations
def forwards_remove_content_types(apps, schema_editor):
@@ -10,8 +7,10 @@ def forwards_remove_content_types(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
- model__in=['githubproject', 'githuborganization',
- 'bitbucketproject', 'bitbucketteam']
+ model__in=[
+ 'githubproject', 'githuborganization',
+ 'bitbucketproject', 'bitbucketteam',
+ ],
).delete()
diff --git a/readthedocs/oauth/migrations/0005_add_account_relation.py b/readthedocs/oauth/migrations/0005_add_account_relation.py
index 100bcd71aef..c8c466db37f 100644
--- a/readthedocs/oauth/migrations/0005_add_account_relation.py
+++ b/readthedocs/oauth/migrations/0005_add_account_relation.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/oauth/migrations/0006_move_oauth_source.py b/readthedocs/oauth/migrations/0006_move_oauth_source.py
index a19d0be04a7..8689b134fa2 100644
--- a/readthedocs/oauth/migrations/0006_move_oauth_source.py
+++ b/readthedocs/oauth/migrations/0006_move_oauth_source.py
@@ -1,12 +1,9 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations
def forwards_move_repo_source(apps, schema_editor):
- """Use source field to set repository account"""
+ """Use source field to set repository account."""
RemoteRepository = apps.get_model('oauth', 'RemoteRepository')
SocialAccount = apps.get_model('socialaccount', 'SocialAccount')
for account in SocialAccount.objects.all():
@@ -16,7 +13,7 @@ def forwards_move_repo_source(apps, schema_editor):
def backwards_move_repo_source(apps, schema_editor):
- RemoteRepository = apps.get_model('oauth', 'RemoteRepository')
+ apps.get_model('oauth', 'RemoteRepository')
SocialAccount = apps.get_model('socialaccount', 'SocialAccount')
for account in SocialAccount.objects.all():
rows = (account.remote_repositories
@@ -24,7 +21,7 @@ def backwards_move_repo_source(apps, schema_editor):
def forwards_move_org_source(apps, schema_editor):
- """Use source field to set organization account"""
+ """Use source field to set organization account."""
RemoteOrganization = apps.get_model('oauth', 'RemoteOrganization')
SocialAccount = apps.get_model('socialaccount', 'SocialAccount')
for account in SocialAccount.objects.all():
@@ -34,8 +31,8 @@ def forwards_move_org_source(apps, schema_editor):
def backwards_move_org_source(apps, schema_editor):
- """Use source field to set organization account"""
- RemoteOrganization = apps.get_model('oauth', 'RemoteOrganization')
+ """Use source field to set organization account."""
+ apps.get_model('oauth', 'RemoteOrganization')
SocialAccount = apps.get_model('socialaccount', 'SocialAccount')
for account in SocialAccount.objects.all():
rows = (account.remote_organizations
@@ -49,8 +46,12 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunPython(forwards_move_repo_source,
- backwards_move_repo_source),
- migrations.RunPython(forwards_move_org_source,
- backwards_move_org_source),
+ migrations.RunPython(
+ forwards_move_repo_source,
+ backwards_move_repo_source,
+ ),
+ migrations.RunPython(
+ forwards_move_org_source,
+ backwards_move_org_source,
+ ),
]
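For reference, a minimal sketch of the RunPython pairing used in the operations list above; the forwards/backwards bodies here are placeholders, not the migration's real logic:

    from django.db import migrations

    def forwards(apps, schema_editor):
        # Historical model access keeps the data migration decoupled
        # from the current models module.
        RemoteRepository = apps.get_model('oauth', 'RemoteRepository')
        RemoteRepository.objects.update(active=True)  # placeholder operation

    def backwards(apps, schema_editor):
        RemoteRepository = apps.get_model('oauth', 'RemoteRepository')
        RemoteRepository.objects.update(active=False)  # placeholder operation

    class Migration(migrations.Migration):
        dependencies = [('oauth', '0005_add_account_relation')]
        operations = [
            migrations.RunPython(forwards, backwards),
        ]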
diff --git a/readthedocs/oauth/migrations/0007_org_slug_nonunique.py b/readthedocs/oauth/migrations/0007_org_slug_nonunique.py
index 65f6f4f4f70..97078038491 100644
--- a/readthedocs/oauth/migrations/0007_org_slug_nonunique.py
+++ b/readthedocs/oauth/migrations/0007_org_slug_nonunique.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/oauth/migrations/0008_add-project-relation.py b/readthedocs/oauth/migrations/0008_add-project-relation.py
index 1e2a478e69f..070b57e654c 100644
--- a/readthedocs/oauth/migrations/0008_add-project-relation.py
+++ b/readthedocs/oauth/migrations/0008_add-project-relation.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-03-22 20:10
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import migrations, models
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/oauth/migrations/0009_add_missing_model_change_migrations.py b/readthedocs/oauth/migrations/0009_add_missing_model_change_migrations.py
index 015c233ac20..c23743a846a 100644
--- a/readthedocs/oauth/migrations/0009_add_missing_model_change_migrations.py
+++ b/readthedocs/oauth/migrations/0009_add_missing_model_change_migrations.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-31 11:25
-from __future__ import unicode_literals
-
import django.core.validators
from django.db import migrations, models
diff --git a/readthedocs/oauth/models.py b/readthedocs/oauth/models.py
index b93f71b9faa..40d224df6b3 100644
--- a/readthedocs/oauth/models.py
+++ b/readthedocs/oauth/models.py
@@ -1,19 +1,15 @@
# -*- coding: utf-8 -*-
-"""OAuth service models."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""OAuth service models."""
import json
-from builtins import object
from allauth.socialaccount.models import SocialAccount
-from django.conf import settings
from django.contrib.auth.models import User
-from django.urls import reverse
from django.core.validators import URLValidator
from django.db import models
from django.db.models import Q
+from django.urls import reverse
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@@ -37,10 +33,17 @@ class RemoteOrganization(models.Model):
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
users = models.ManyToManyField(
- User, verbose_name=_('Users'), related_name='oauth_organizations')
+ User,
+ verbose_name=_('Users'),
+ related_name='oauth_organizations',
+ )
account = models.ForeignKey(
- SocialAccount, verbose_name=_('Connected account'),
- related_name='remote_organizations', null=True, blank=True)
+ SocialAccount,
+ verbose_name=_('Connected account'),
+ related_name='remote_organizations',
+ null=True,
+ blank=True,
+ )
active = models.BooleanField(_('Active'), default=False)
slug = models.CharField(_('Slug'), max_length=255)
@@ -48,7 +51,11 @@ class RemoteOrganization(models.Model):
email = models.EmailField(_('Email'), max_length=255, null=True, blank=True)
avatar_url = models.URLField(_('Avatar image URL'), null=True, blank=True)
url = models.URLField(
- _('URL to organization page'), max_length=200, null=True, blank=True)
+ _('URL to organization page'),
+ max_length=200,
+ null=True,
+ blank=True,
+ )
json = models.TextField(_('Serialized API response'))
@@ -82,13 +89,24 @@ class RemoteRepository(models.Model):
# This should now be a OneToOne
users = models.ManyToManyField(
- User, verbose_name=_('Users'), related_name='oauth_repositories')
+ User,
+ verbose_name=_('Users'),
+ related_name='oauth_repositories',
+ )
account = models.ForeignKey(
- SocialAccount, verbose_name=_('Connected account'),
- related_name='remote_repositories', null=True, blank=True)
+ SocialAccount,
+ verbose_name=_('Connected account'),
+ related_name='remote_repositories',
+ null=True,
+ blank=True,
+ )
organization = models.ForeignKey(
- RemoteOrganization, verbose_name=_('Organization'),
- related_name='repositories', null=True, blank=True)
+ RemoteOrganization,
+ verbose_name=_('Organization'),
+ related_name='repositories',
+ null=True,
+ blank=True,
+ )
active = models.BooleanField(_('Active'), default=False)
project = models.OneToOneField(
@@ -123,7 +141,7 @@ class RemoteRepository(models.Model):
max_length=512,
blank=True,
validators=[
- URLValidator(schemes=['http', 'https', 'ssh', 'git', 'svn'])
+ URLValidator(schemes=['http', 'https', 'ssh', 'git', 'svn']),
],
)
html_url = models.URLField(_('HTML URL'), null=True, blank=True)
@@ -141,7 +159,7 @@ class RemoteRepository(models.Model):
objects = RemoteRepositoryQuerySet.as_manager()
- class Meta(object):
+ class Meta:
ordering = ['organization__name', 'name']
verbose_name_plural = 'remote repositories'
@@ -160,7 +178,6 @@ def get_serialized(self, key=None, default=None):
@property
def clone_fuzzy_url(self):
"""Try to match against several permutations of project URL."""
- pass
def matches(self, user):
"""Projects that exist with repository URL already."""
diff --git a/readthedocs/oauth/notifications.py b/readthedocs/oauth/notifications.py
index fc9aefc0c3b..c5c5165aa7f 100644
--- a/readthedocs/oauth/notifications.py
+++ b/readthedocs/oauth/notifications.py
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from messages_extends.constants import ERROR_PERSISTENT
@@ -17,12 +15,16 @@ class AttachWebhookNotification(SiteNotification):
context_object_name = 'provider'
success_message = _('Webhook successfully added.')
failure_message = {
- NO_PERMISSIONS: _('Could not add webhook for {{ project.name }}. Make sure you have the correct {{ provider.name }} permissions.'), # noqa
- NO_ACCOUNTS: _('Could not add webhook for {{ project.name }}. Please connect your {{ provider.name }} account.'), # noqa
+ NO_PERMISSIONS: _(
+ 'Could not add webhook for {{ project.name }}. Make sure you have the correct {{ provider.name }} permissions.', # noqa
+ ),
+ NO_ACCOUNTS: _(
+ 'Could not add webhook for {{ project.name }}. Please connect your {{ provider.name }} account.', # noqa
+ ),
}
def get_context_data(self):
- context = super(AttachWebhookNotification, self).get_context_data()
+ context = super().get_context_data()
project = self.extra_context.get('project')
context.update({
'url_connect_account': reverse(
@@ -41,10 +43,11 @@ class InvalidProjectWebhookNotification(SiteNotification):
failure_message = _(
"The project {{ project.name }} doesn't have a valid webhook set up, "
"commits won't trigger new builds for this project. "
- "See the project integrations for more information.") # noqa
+ "See the project integrations for more information.",
+ ) # noqa
def get_context_data(self):
- context = super(InvalidProjectWebhookNotification, self).get_context_data()
+ context = super().get_context_data()
context.update({
'url_integrations': reverse(
'projects_integrations',
diff --git a/readthedocs/oauth/querysets.py b/readthedocs/oauth/querysets.py
index d01703eb1f0..e7f20dc184e 100644
--- a/readthedocs/oauth/querysets.py
+++ b/readthedocs/oauth/querysets.py
@@ -1,6 +1,6 @@
-"""Managers for OAuth models"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Managers for OAuth models."""
from django.db import models
@@ -12,7 +12,7 @@ class RelatedUserQuerySetBase(models.QuerySet):
"""For models with relations through :py:class:`User`"""
def api(self, user=None):
- """Return objects for user"""
+ """Return objects for user."""
if not user.is_authenticated:
return self.none()
return self.filter(users=user)
diff --git a/readthedocs/oauth/services/__init__.py b/readthedocs/oauth/services/__init__.py
index b1b5003b08a..a249e15d934 100644
--- a/readthedocs/oauth/services/__init__.py
+++ b/readthedocs/oauth/services/__init__.py
@@ -1,8 +1,13 @@
# -*- coding: utf-8 -*-
+
"""Conditional classes for OAuth services."""
from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+ absolute_import,
+ division,
+ print_function,
+ unicode_literals,
+)
from readthedocs.core.utils.extend import SettingsOverrideObject
from readthedocs.oauth.services import bitbucket, github, gitlab
diff --git a/readthedocs/oauth/services/base.py b/readthedocs/oauth/services/base.py
index 93064779ef9..b1f0e7a12c5 100644
--- a/readthedocs/oauth/services/base.py
+++ b/readthedocs/oauth/services/base.py
@@ -1,25 +1,23 @@
# -*- coding: utf-8 -*-
-"""OAuth utility functions."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""OAuth utility functions."""
import logging
from datetime import datetime
from allauth.socialaccount.models import SocialAccount
from allauth.socialaccount.providers import registry
-from builtins import object
from django.conf import settings
from django.utils import timezone
from oauthlib.oauth2.rfc6749.errors import InvalidClientIdError
from requests.exceptions import RequestException
from requests_oauthlib import OAuth2Session
+
log = logging.getLogger(__name__)
-class Service(object):
+class Service:
"""
Service mapping for local accounts.
@@ -118,10 +116,11 @@ def token_updater(self, token):
u'expires_at': 1449218652.558185
}
"""
+
def _updater(data):
token.token = data['access_token']
token.expires_at = timezone.make_aware(
- datetime.fromtimestamp(data['expires_at'])
+ datetime.fromtimestamp(data['expires_at']),
)
token.save()
log.info('Updated token %s:', token)
diff --git a/readthedocs/oauth/services/bitbucket.py b/readthedocs/oauth/services/bitbucket.py
index 0ce2f55a21c..817ea98bd75 100644
--- a/readthedocs/oauth/services/bitbucket.py
+++ b/readthedocs/oauth/services/bitbucket.py
@@ -1,16 +1,17 @@
+# -*- coding: utf-8 -*-
+
"""OAuth utility functions."""
-from __future__ import absolute_import
-from builtins import str
-import logging
import json
+import logging
import re
+from allauth.socialaccount.providers.bitbucket_oauth2.views import (
+ BitbucketOAuth2Adapter,
+)
from django.conf import settings
from django.urls import reverse
from requests.exceptions import RequestException
-from allauth.socialaccount.providers.bitbucket_oauth2.views import (
- BitbucketOAuth2Adapter)
from readthedocs.builds import utils as build_utils
from readthedocs.integrations.models import Integration
@@ -41,25 +42,30 @@ def sync_repositories(self):
# Get user repos
try:
repos = self.paginate(
- 'https://bitbucket.org/api/2.0/repositories/?role=member')
+ 'https://bitbucket.org/api/2.0/repositories/?role=member',
+ )
for repo in repos:
self.create_repository(repo)
- except (TypeError, ValueError) as e:
+ except (TypeError, ValueError):
log.exception('Error syncing Bitbucket repositories')
- raise Exception('Could not sync your Bitbucket repositories, '
- 'try reconnecting your account')
+ raise Exception(
+ 'Could not sync your Bitbucket repositories, '
+ 'try reconnecting your account',
+ )
# Because privileges aren't returned with repository data, run query
# again for repositories that user has admin role for, and update
# existing repositories.
try:
resp = self.paginate(
- 'https://bitbucket.org/api/2.0/repositories/?role=admin')
+ 'https://bitbucket.org/api/2.0/repositories/?role=admin',
+ )
repos = (
- RemoteRepository.objects
- .filter(users=self.user,
- full_name__in=[r['full_name'] for r in resp],
- account=self.account)
+ RemoteRepository.objects.filter(
+ users=self.user,
+ full_name__in=[r['full_name'] for r in resp],
+ account=self.account,
+ )
)
for repo in repos:
repo.admin = True
@@ -71,17 +77,19 @@ def sync_teams(self):
"""Sync Bitbucket teams and team repositories."""
try:
teams = self.paginate(
- 'https://api.bitbucket.org/2.0/teams/?role=member'
+ 'https://api.bitbucket.org/2.0/teams/?role=member',
)
for team in teams:
org = self.create_organization(team)
repos = self.paginate(team['links']['repositories']['href'])
for repo in repos:
self.create_repository(repo, organization=org)
- except ValueError as e:
+ except ValueError:
log.exception('Error syncing Bitbucket organizations')
- raise Exception('Could not sync your Bitbucket team repositories, '
- 'try reconnecting your account')
+ raise Exception(
+ 'Could not sync your Bitbucket team repositories, '
+ 'try reconnecting your account',
+ )
def create_repository(self, fields, privacy=None, organization=None):
"""
@@ -99,17 +107,17 @@ def create_repository(self, fields, privacy=None, organization=None):
:rtype: RemoteRepository
"""
privacy = privacy or settings.DEFAULT_PRIVACY_LEVEL
- if (
- (privacy == 'private') or
- (fields['is_private'] is False and privacy == 'public')
- ):
+ if ((privacy == 'private') or
+ (fields['is_private'] is False and privacy == 'public')):
repo, _ = RemoteRepository.objects.get_or_create(
full_name=fields['full_name'],
account=self.account,
)
if repo.organization and repo.organization != organization:
- log.debug('Not importing %s because mismatched orgs',
- fields['name'])
+ log.debug(
+ 'Not importing %s because mismatched orgs',
+ fields['name'],
+ )
return None
repo.organization = organization
@@ -119,11 +127,13 @@ def create_repository(self, fields, privacy=None, organization=None):
repo.private = fields['is_private']
# Default to HTTPS, use SSH for private repositories
- clone_urls = dict((u['name'], u['href'])
- for u in fields['links']['clone'])
+ clone_urls = {
+ u['name']: u['href']
+ for u in fields['links']['clone']
+ }
repo.clone_url = self.https_url_pattern.sub(
'https://bitbucket.org/',
- clone_urls.get('https')
+ clone_urls.get('https'),
)
repo.ssh_url = clone_urls.get('ssh')
if repo.private:
@@ -179,14 +189,18 @@ def get_paginated_results(self, response):
def get_webhook_data(self, project, integration):
"""Get webhook JSON data to post to the API."""
return json.dumps({
- 'description': 'Read the Docs ({domain})'.format(domain=settings.PRODUCTION_DOMAIN),
+ 'description': 'Read the Docs ({domain})'.format(
+ domain=settings.PRODUCTION_DOMAIN,
+ ),
'url': 'https://{domain}{path}'.format(
domain=settings.PRODUCTION_DOMAIN,
path=reverse(
'api_webhook',
- kwargs={'project_slug': project.slug,
- 'integration_pk': integration.pk}
- )
+ kwargs={
+ 'project_slug': project.slug,
+ 'integration_pk': integration.pk,
+ },
+ ),
),
'active': True,
'events': ['repo:push'],
@@ -211,10 +225,12 @@ def setup_webhook(self, project):
resp = None
try:
resp = session.post(
- ('https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/hooks'
- .format(owner=owner, repo=repo)),
+ (
+ 'https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/hooks'
+ .format(owner=owner, repo=repo)
+ ),
data=data,
- headers={'content-type': 'application/json'}
+ headers={'content-type': 'application/json'},
)
if resp.status_code == 201:
recv_data = resp.json()
@@ -265,7 +281,7 @@ def update_webhook(self, project, integration):
resp = session.put(
url,
data=data,
- headers={'content-type': 'application/json'}
+ headers={'content-type': 'application/json'},
)
if resp.status_code == 200:
recv_data = resp.json()
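A standalone sketch of the clone-URL selection rewritten above: collapse Bitbucket's clone links into a dict, then prefer HTTPS for public repositories and SSH for private ones (the payload shape follows the fields used in the diff):

    def pick_clone_url(fields):
        clone_urls = {
            link['name']: link['href']
            for link in fields['links']['clone']
        }
        # Private repositories need SSH; public ones default to HTTPS.
        if fields['is_private']:
            return clone_urls.get('ssh')
        return clone_urls.get('https')

    repo = {
        'is_private': False,
        'links': {'clone': [
            {'name': 'https', 'href': 'https://bitbucket.org/org/repo.git'},
            {'name': 'ssh', 'href': 'git@bitbucket.org:org/repo.git'},
        ]},
    }
    assert pick_clone_url(repo) == 'https://bitbucket.org/org/repo.git'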
diff --git a/readthedocs/oauth/services/github.py b/readthedocs/oauth/services/github.py
index 5d6164cc061..099743cd6bb 100644
--- a/readthedocs/oauth/services/github.py
+++ b/readthedocs/oauth/services/github.py
@@ -1,16 +1,16 @@
+# -*- coding: utf-8 -*-
+
"""OAuth utility functions."""
-from __future__ import absolute_import
-from builtins import str
-import logging
import json
+import logging
import re
+from allauth.socialaccount.models import SocialToken
+from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from django.conf import settings
from django.urls import reverse
from requests.exceptions import RequestException
-from allauth.socialaccount.models import SocialToken
-from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from readthedocs.builds import utils as build_utils
from readthedocs.integrations.models import Integration
@@ -19,6 +19,7 @@
from ..models import RemoteOrganization, RemoteRepository
from .base import Service
+
log = logging.getLogger(__name__)
@@ -41,10 +42,12 @@ def sync_repositories(self):
try:
for repo in repos:
self.create_repository(repo)
- except (TypeError, ValueError) as e:
+ except (TypeError, ValueError):
log.exception('Error syncing GitHub repositories')
- raise Exception('Could not sync your GitHub repositories, '
- 'try reconnecting your account')
+ raise Exception(
+ 'Could not sync your GitHub repositories, '
+ 'try reconnecting your account',
+ )
def sync_organizations(self):
"""Sync organizations from GitHub API."""
@@ -56,14 +59,16 @@ def sync_organizations(self):
# Add repos
# TODO ?per_page=100
org_repos = self.paginate(
- '{org_url}/repos'.format(org_url=org['url'])
+ '{org_url}/repos'.format(org_url=org['url']),
)
for repo in org_repos:
self.create_repository(repo, organization=org_obj)
- except (TypeError, ValueError) as e:
+ except (TypeError, ValueError):
log.exception('Error syncing GitHub organizations')
- raise Exception('Could not sync your GitHub organizations, '
- 'try reconnecting your account')
+ raise Exception(
+ 'Could not sync your GitHub organizations, '
+ 'try reconnecting your account',
+ )
def create_repository(self, fields, privacy=None, organization=None):
"""
@@ -76,10 +81,8 @@ def create_repository(self, fields, privacy=None, organization=None):
:rtype: RemoteRepository
"""
privacy = privacy or settings.DEFAULT_PRIVACY_LEVEL
- if (
- (privacy == 'private') or
- (fields['private'] is False and privacy == 'public')
- ):
+ if ((privacy == 'private') or
+ (fields['private'] is False and privacy == 'public')):
try:
repo = RemoteRepository.objects.get(
full_name=fields['full_name'],
@@ -93,8 +96,10 @@ def create_repository(self, fields, privacy=None, organization=None):
)
repo.users.add(self.user)
if repo.organization and repo.organization != organization:
- log.debug('Not importing %s because mismatched orgs',
- fields['name'])
+ log.debug(
+ 'Not importing %s because mismatched orgs',
+ fields['name'],
+ )
return None
repo.organization = organization
@@ -117,8 +122,10 @@ def create_repository(self, fields, privacy=None, organization=None):
repo.save()
return repo
else:
- log.debug('Not importing %s because mismatched type',
- fields['name'])
+ log.debug(
+ 'Not importing %s because mismatched type',
+ fields['name'],
+ )
def create_organization(self, fields):
"""
@@ -166,9 +173,11 @@ def get_webhook_data(self, project, integration):
domain=settings.PRODUCTION_DOMAIN,
path=reverse(
'api_webhook',
- kwargs={'project_slug': project.slug,
- 'integration_pk': integration.pk}
- )
+ kwargs={
+ 'project_slug': project.slug,
+ 'integration_pk': integration.pk,
+ },
+ ),
),
'content_type': 'json',
},
@@ -194,18 +203,22 @@ def setup_webhook(self, project):
resp = None
try:
resp = session.post(
- ('https://api.github.com/repos/{owner}/{repo}/hooks'
- .format(owner=owner, repo=repo)),
+ (
+ 'https://api.github.com/repos/{owner}/{repo}/hooks'
+ .format(owner=owner, repo=repo)
+ ),
data=data,
- headers={'content-type': 'application/json'}
+ headers={'content-type': 'application/json'},
)
# GitHub will return 200 if already synced
if resp.status_code in [200, 201]:
recv_data = resp.json()
integration.provider_data = recv_data
integration.save()
- log.info('GitHub webhook creation successful for project: %s',
- project)
+ log.info(
+ 'GitHub webhook creation successful for project: %s',
+ project,
+ )
return (True, resp)
if resp.status_code in [401, 403, 404]:
@@ -257,7 +270,7 @@ def update_webhook(self, project, integration):
resp = session.patch(
url,
data=data,
- headers={'content-type': 'application/json'}
+ headers={'content-type': 'application/json'},
)
# GitHub will return 200 if already synced
if resp.status_code in [200, 201]:
@@ -310,7 +323,8 @@ def get_token_for_project(cls, project, force_local=False):
for user in project.users.all():
tokens = SocialToken.objects.filter(
account__user=user,
- app__provider=cls.adapter.provider_id)
+ app__provider=cls.adapter.provider_id,
+ )
if tokens.exists():
token = tokens[0].token
except Exception:
diff --git a/readthedocs/oauth/services/gitlab.py b/readthedocs/oauth/services/gitlab.py
index b9562617adf..5f94bd51690 100644
--- a/readthedocs/oauth/services/gitlab.py
+++ b/readthedocs/oauth/services/gitlab.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
-"""OAuth utility functions."""
-from __future__ import division, print_function, unicode_literals
+"""OAuth utility functions."""
import json
import logging
@@ -19,6 +18,7 @@
from ..models import RemoteOrganization, RemoteRepository
from .base import Service
+
try:
from urlparse import urljoin, urlparse
except ImportError:
@@ -41,7 +41,8 @@ class GitLabService(Service):
# Just use the network location to determine if it's a GitLab project
# because private repos have another base url, eg. git@gitlab.example.com
url_pattern = re.compile(
- re.escape(urlparse(adapter.provider_base_url).netloc))
+ re.escape(urlparse(adapter.provider_base_url).netloc),
+ )
def _get_repo_id(self, project):
# The ID or URL-encoded path of the project
@@ -94,7 +95,8 @@ def sync_repositories(self):
log.exception('Error syncing GitLab repositories')
raise Exception(
'Could not sync your GitLab repositories, try reconnecting '
- 'your account')
+ 'your account',
+ )
def sync_organizations(self):
orgs = self.paginate(
@@ -124,7 +126,8 @@ def sync_organizations(self):
log.exception('Error syncing GitLab organizations')
raise Exception(
'Could not sync your GitLab organization, try reconnecting '
- 'your account')
+ 'your account',
+ )
def is_owned_by(self, owner_id):
return self.account.extra_data['id'] == owner_id
@@ -349,7 +352,9 @@ def update_webhook(self, project, integration):
integration.provider_data = recv_data
integration.save()
log.info(
- 'GitLab webhook update successful for project: %s', project)
+ 'GitLab webhook update successful for project: %s',
+ project,
+ )
return (True, resp)
# GitLab returns 404 when the webhook doesn't exist. In this case,
@@ -360,7 +365,9 @@ def update_webhook(self, project, integration):
# Catch exceptions with request or deserializing JSON
except (RequestException, ValueError):
log.exception(
- 'GitLab webhook update failed for project: %s', project)
+ 'GitLab webhook update failed for project: %s',
+ project,
+ )
else:
log.error(
'GitLab webhook update failed for project: %s',
diff --git a/readthedocs/oauth/tasks.py b/readthedocs/oauth/tasks.py
index 45b49ceac09..18c0ccb2eca 100644
--- a/readthedocs/oauth/tasks.py
+++ b/readthedocs/oauth/tasks.py
@@ -1,12 +1,6 @@
# -*- coding: utf-8 -*-
-"""Tasks for OAuth services."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Tasks for OAuth services."""
import logging
@@ -23,6 +17,7 @@
from .services import registry
+
log = logging.getLogger(__name__)
diff --git a/readthedocs/oauth/utils.py b/readthedocs/oauth/utils.py
index 64e7dc7ed07..b33fc9a6e65 100644
--- a/readthedocs/oauth/utils.py
+++ b/readthedocs/oauth/utils.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""Support code for OAuth, including webhook support."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Support code for OAuth, including webhook support."""
import logging
@@ -11,9 +9,14 @@
from readthedocs.integrations.models import Integration
from readthedocs.oauth.services import (
- BitbucketService, GitHubService, GitLabService, registry)
+ BitbucketService,
+ GitHubService,
+ GitLabService,
+ registry,
+)
from readthedocs.projects.models import Project
+
log = logging.getLogger(__name__)
SERVICE_MAP = {
@@ -52,7 +55,9 @@ def update_webhook(project, integration, request=None):
request,
_(
'Webhook activation failed. '
- 'Make sure you have the necessary permissions.'))
+ 'Make sure you have the necessary permissions.',
+ ),
+ )
project.has_valid_webhook = False
project.save()
return False
diff --git a/readthedocs/payments/forms.py b/readthedocs/payments/forms.py
index eae6e88dcb6..59f9bce97eb 100644
--- a/readthedocs/payments/forms.py
+++ b/readthedocs/payments/forms.py
@@ -1,21 +1,21 @@
+# -*- coding: utf-8 -*-
+
"""Payment forms."""
-from __future__ import absolute_import
-from builtins import str
-from builtins import object
import logging
-from stripe import Customer, Charge
-from stripe.error import InvalidRequestError
from django import forms
from django.utils.translation import ugettext_lazy as _
+from stripe import Charge, Customer
+from stripe.error import InvalidRequestError
from .utils import stripe
+
log = logging.getLogger(__name__)
-class StripeResourceMixin(object):
+class StripeResourceMixin:
"""Stripe actions for resources, available as a Form mixin class."""
@@ -38,23 +38,29 @@ def get_customer_kwargs(self):
raise NotImplementedError
def get_customer(self):
- return self.ensure_stripe_resource(resource=Customer,
- attrs=self.get_customer_kwargs())
+ return self.ensure_stripe_resource(
+ resource=Customer,
+ attrs=self.get_customer_kwargs(),
+ )
def get_subscription_kwargs(self):
raise NotImplementedError
def get_subscription(self):
customer = self.get_customer()
- return self.ensure_stripe_resource(resource=customer.subscriptions,
- attrs=self.get_subscription_kwargs())
+ return self.ensure_stripe_resource(
+ resource=customer.subscriptions,
+ attrs=self.get_subscription_kwargs(),
+ )
def get_charge_kwargs(self):
raise NotImplementedError
def get_charge(self):
- return self.ensure_stripe_resource(resource=Charge,
- attrs=self.get_charge_kwargs())
+ return self.ensure_stripe_resource(
+ resource=Charge,
+ attrs=self.get_charge_kwargs(),
+ )
class StripeModelForm(forms.ModelForm):
@@ -79,45 +85,62 @@ class StripeModelForm(forms.ModelForm):
# Stripe token input from Stripe.js
stripe_token = forms.CharField(
required=False,
- widget=forms.HiddenInput(attrs={
- 'data-bind': 'valueInit: stripe_token',
- })
+ widget=forms.HiddenInput(
+ attrs={
+ 'data-bind': 'valueInit: stripe_token',
+ },
+ ),
)
# Fields used for fetching token with javascript, listed as form fields so
# that data can survive validation errors
cc_number = forms.CharField(
label=_('Card number'),
- widget=forms.TextInput(attrs={
- 'data-bind': ('valueInit: cc_number, '
- 'textInput: cc_number, '
- '''css: {'field-error': error_cc_number() != null}''')
- }),
+ widget=forms.TextInput(
+ attrs={
+ 'data-bind': (
+ 'valueInit: cc_number, '
+ 'textInput: cc_number, '
+ '''css: {'field-error': error_cc_number() != null}'''
+ ),
+ },
+ ),
max_length=25,
- required=False)
+ required=False,
+ )
cc_expiry = forms.CharField(
label=_('Card expiration'),
- widget=forms.TextInput(attrs={
- 'data-bind': ('valueInit: cc_expiry, '
- 'textInput: cc_expiry, '
- '''css: {'field-error': error_cc_expiry() != null}''')
- }),
+ widget=forms.TextInput(
+ attrs={
+ 'data-bind': (
+ 'valueInit: cc_expiry, '
+ 'textInput: cc_expiry, '
+ '''css: {'field-error': error_cc_expiry() != null}'''
+ ),
+ },
+ ),
max_length=10,
- required=False)
+ required=False,
+ )
cc_cvv = forms.CharField(
label=_('Card CVV'),
- widget=forms.TextInput(attrs={
- 'data-bind': ('valueInit: cc_cvv, '
- 'textInput: cc_cvv, '
- '''css: {'field-error': error_cc_cvv() != null}'''),
- 'autocomplete': 'off',
- }),
+ widget=forms.TextInput(
+ attrs={
+ 'data-bind': (
+ 'valueInit: cc_cvv, '
+ 'textInput: cc_cvv, '
+ '''css: {'field-error': error_cc_cvv() != null}'''
+ ),
+ 'autocomplete': 'off',
+ },
+ ),
max_length=8,
- required=False)
+ required=False,
+ )
def __init__(self, *args, **kwargs):
self.customer = kwargs.pop('customer', None)
- super(StripeModelForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def validate_stripe(self):
"""
@@ -147,7 +170,7 @@ def clean(self):
raise any issues as validation errors. This is required because part of
Stripe's validation happens on the API call to establish a subscription.
"""
- cleaned_data = super(StripeModelForm, self).clean()
+ cleaned_data = super().clean()
# Form isn't valid, no need to try to associate a card now
if not self.is_valid():
@@ -173,7 +196,8 @@ def clean(self):
except stripe.error.StripeError as e:
log.exception('There was a problem communicating with Stripe')
raise forms.ValidationError(
- _('There was a problem communicating with Stripe'))
+ _('There was a problem communicating with Stripe'),
+ )
return cleaned_data
def clear_card_data(self):
@@ -186,12 +210,14 @@ def clear_card_data(self):
try:
self.data['stripe_token'] = None
except AttributeError:
- raise AttributeError('Form was passed immutable QueryDict POST data')
+ raise AttributeError(
+ 'Form was passed immutable QueryDict POST data',
+ )
def fields_with_cc_group(self):
group = {
'is_cc_group': True,
- 'fields': []
+ 'fields': [],
}
for field in self:
if field.name in ['cc_number', 'cc_expiry', 'cc_cvv']:
diff --git a/readthedocs/payments/mixins.py b/readthedocs/payments/mixins.py
index 0219da08098..4bce56d8216 100644
--- a/readthedocs/payments/mixins.py
+++ b/readthedocs/payments/mixins.py
@@ -1,16 +1,16 @@
+# -*- coding: utf-8 -*-
+
"""Payment view mixin classes."""
-from __future__ import absolute_import
-from builtins import object
from django.conf import settings
-class StripeMixin(object):
+class StripeMixin:
"""Adds Stripe publishable key to the context data."""
def get_context_data(self, **kwargs):
- context = super(StripeMixin, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['stripe_publishable'] = settings.STRIPE_PUBLISHABLE
return context
diff --git a/readthedocs/payments/utils.py b/readthedocs/payments/utils.py
index a65b5b0a8f6..ba1b045a2f8 100644
--- a/readthedocs/payments/utils.py
+++ b/readthedocs/payments/utils.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Payment utility functions.
@@ -5,10 +7,10 @@
:py:class:`readthedocs.payments.forms.StripeResourceMixin`.
"""
-from __future__ import absolute_import
import stripe
from django.conf import settings
+
stripe.api_key = getattr(settings, 'STRIPE_SECRET', None)
diff --git a/readthedocs/profiles/urls/private.py b/readthedocs/profiles/urls/private.py
index 85acbf2d9fa..ca6c13428f0 100644
--- a/readthedocs/profiles/urls/private.py
+++ b/readthedocs/profiles/urls/private.py
@@ -1,11 +1,6 @@
-"""URL patterns for views to modify user profiles."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""URL patterns for views to modify user profiles."""
from django.conf.urls import url
@@ -15,12 +10,13 @@
urlpatterns = [
url(
- r'^edit/', views.edit_profile,
+ r'^edit/',
+ views.edit_profile,
{
'form_class': UserProfileForm,
'template_name': 'profiles/private/edit_profile.html',
},
- name='profiles_profile_edit'
+ name='profiles_profile_edit',
),
url(r'^delete/', views.delete_account, name='delete_account'),
url(
diff --git a/readthedocs/profiles/urls/public.py b/readthedocs/profiles/urls/public.py
index 2a9c458e6fd..d2cec291bae 100644
--- a/readthedocs/profiles/urls/public.py
+++ b/readthedocs/profiles/urls/public.py
@@ -1,14 +1,17 @@
+# -*- coding: utf-8 -*-
+
"""URL patterns to view user profiles."""
-from __future__ import absolute_import
from django.conf.urls import url
from readthedocs.profiles import views
urlpatterns = [
-    url(r'^(?P<username>[+\w@.-]+)/$',
+ url(
+        r'^(?P<username>[+\w@.-]+)/$',
views.profile_detail,
{'template_name': 'profiles/public/profile_detail.html'},
- name='profiles_profile_detail'),
+ name='profiles_profile_detail',
+ ),
]
diff --git a/readthedocs/profiles/views.py b/readthedocs/profiles/views.py
index d288f4767bf..e2e85b22de8 100644
--- a/readthedocs/profiles/views.py
+++ b/readthedocs/profiles/views.py
@@ -1,20 +1,14 @@
# -*- coding: utf-8 -*-
-"""Views for creating, editing and viewing site-specific user profiles."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Views for creating, editing and viewing site-specific user profiles."""
from django.contrib import messages
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
-from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, redirect, render
+from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from readthedocs.core.forms import UserAdvertisingForm, UserDeleteForm
@@ -22,8 +16,12 @@
@login_required
def edit_profile(
- request, form_class, success_url=None,
- template_name='profiles/private/edit_profile.html', extra_context=None):
+ request,
+ form_class,
+ success_url=None,
+ template_name='profiles/private/edit_profile.html',
+ extra_context=None,
+):
"""
Edit the current user's profile.
@@ -70,10 +68,14 @@ def edit_profile(
if success_url is None:
success_url = reverse(
'profiles_profile_detail',
- kwargs={'username': request.user.username})
+ kwargs={'username': request.user.username},
+ )
if request.method == 'POST':
form = form_class(
- data=request.POST, files=request.FILES, instance=profile_obj)
+ data=request.POST,
+ files=request.FILES,
+ instance=profile_obj,
+ )
if form.is_valid():
form.save()
return HttpResponseRedirect(success_url)
@@ -114,9 +116,12 @@ def delete_account(request):
def profile_detail(
- request, username, public_profile_field=None,
+ request,
+ username,
+ public_profile_field=None,
template_name='profiles/public/profile_detail.html',
- extra_context=None):
+ extra_context=None,
+):
"""
Detail view of a user's profile.
diff --git a/readthedocs/projects/__init__.py b/readthedocs/projects/__init__.py
index ff5ded49b17..186dc5eb841 100644
--- a/readthedocs/projects/__init__.py
+++ b/readthedocs/projects/__init__.py
@@ -1 +1,2 @@
+# -*- coding: utf-8 -*-
default_app_config = 'readthedocs.projects.apps.ProjectsConfig'
diff --git a/readthedocs/projects/admin.py b/readthedocs/projects/admin.py
index 9f6da41ddd9..73914222ce3 100644
--- a/readthedocs/projects/admin.py
+++ b/readthedocs/projects/admin.py
@@ -1,12 +1,6 @@
# -*- coding: utf-8 -*-
-"""Django administration interface for `projects.models`"""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Django administration interface for `projects.models`"""
from django.contrib import admin, messages
from django.contrib.admin.actions import delete_selected
@@ -109,9 +103,7 @@ class ProjectOwnerBannedFilter(admin.SimpleListFilter):
OWNER_BANNED = 'true'
def lookups(self, request, model_admin):
- return (
- (self.OWNER_BANNED, _('Yes')),
- )
+ return ((self.OWNER_BANNED, _('Yes')),)
def queryset(self, request, queryset):
if self.value() == self.OWNER_BANNED:
@@ -125,13 +117,23 @@ class ProjectAdmin(GuardedModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ('name', 'slug', 'repo', 'repo_type', 'featured')
- list_filter = ('repo_type', 'featured', 'privacy_level',
- 'documentation_type', 'programming_language',
- 'feature__feature_id', ProjectOwnerBannedFilter)
+ list_filter = (
+ 'repo_type',
+ 'featured',
+ 'privacy_level',
+ 'documentation_type',
+ 'programming_language',
+ 'feature__feature_id',
+ ProjectOwnerBannedFilter,
+ )
list_editable = ('featured',)
search_fields = ('slug', 'repo')
- inlines = [ProjectRelationshipInline, RedirectInline,
- VersionInline, DomainInline]
+ inlines = [
+ ProjectRelationshipInline,
+ RedirectInline,
+ VersionInline,
+ DomainInline,
+ ]
readonly_fields = ('feature_flags',)
raw_id_fields = ('users', 'main_language_project')
actions = ['send_owner_email', 'ban_owner']
@@ -141,7 +143,7 @@ def feature_flags(self, obj):
def send_owner_email(self, request, queryset):
view = ProjectSendNotificationView.as_view(
- action_name='send_owner_email'
+ action_name='send_owner_email',
)
return view(request, queryset=queryset)
@@ -158,18 +160,25 @@ def ban_owner(self, request, queryset):
total = 0
for project in queryset:
if project.users.count() == 1:
- count = (UserProfile.objects
- .filter(user__projects=project)
- .update(banned=True))
+ count = (
+ UserProfile.objects.filter(user__projects=project
+ ).update(banned=True)
+            )  # yapf: disable
total += count
else:
- messages.add_message(request, messages.ERROR,
- 'Project has multiple owners: {0}'.format(project))
+ messages.add_message(
+ request,
+ messages.ERROR,
+ 'Project has multiple owners: {}'.format(project),
+ )
if total == 0:
messages.add_message(request, messages.ERROR, 'No users banned')
else:
- messages.add_message(request, messages.INFO,
- 'Banned {0} user(s)'.format(total))
+ messages.add_message(
+ request,
+ messages.INFO,
+ 'Banned {} user(s)'.format(total),
+ )
ban_owner.short_description = 'Ban project owner'
@@ -190,11 +199,11 @@ def delete_selected_and_artifacts(self, request, queryset):
return delete_selected(self, request, queryset)
def get_actions(self, request):
- actions = super(ProjectAdmin, self).get_actions(request)
+ actions = super().get_actions(request)
actions['delete_selected'] = (
self.__class__.delete_selected_and_artifacts,
'delete_selected',
- delete_selected.short_description
+ delete_selected.short_description,
)
return actions
diff --git a/readthedocs/projects/apps.py b/readthedocs/projects/apps.py
index e29afbe49ce..76b3fae1b69 100644
--- a/readthedocs/projects/apps.py
+++ b/readthedocs/projects/apps.py
@@ -1,4 +1,6 @@
-"""Project app config"""
+# -*- coding: utf-8 -*-
+
+"""Project app config."""
from django.apps import AppConfig
diff --git a/readthedocs/projects/backends/views.py b/readthedocs/projects/backends/views.py
index 2b3d0fa41c5..ebae2d8b173 100644
--- a/readthedocs/projects/backends/views.py
+++ b/readthedocs/projects/backends/views.py
@@ -1,11 +1,12 @@
+# -*- coding: utf-8 -*-
+
"""
-Project views loaded by configuration settings
+Project views loaded by configuration settings.
Use these views instead of calling the views directly, in order to allow for
settings override of the view class.
"""
-from __future__ import absolute_import
from readthedocs.core.utils.extend import SettingsOverrideObject
from readthedocs.projects.views import private
diff --git a/readthedocs/projects/constants.py b/readthedocs/projects/constants.py
index 93e9433eb1a..26093e84917 100644
--- a/readthedocs/projects/constants.py
+++ b/readthedocs/projects/constants.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+
"""
Project constants.
@@ -6,13 +7,11 @@
theme names and repository types.
"""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import re
from django.utils.translation import ugettext_lazy as _
+
DOCUMENTATION_CHOICES = (
('sphinx', _('Sphinx Html')),
('mkdocs', _('Mkdocs (Markdown)')),
@@ -311,10 +310,13 @@
]
GITHUB_URL = (
'https://github.com/{user}/{repo}/'
- '{action}/{version}{docroot}{path}{source_suffix}')
+ '{action}/{version}{docroot}{path}{source_suffix}'
+)
BITBUCKET_URL = (
'https://bitbucket.org/{user}/{repo}/'
- 'src/{version}{docroot}{path}{source_suffix}')
+ 'src/{version}{docroot}{path}{source_suffix}'
+)
GITLAB_URL = (
'https://gitlab.com/{user}/{repo}/'
- '{action}/{version}{docroot}{path}{source_suffix}')
+ '{action}/{version}{docroot}{path}{source_suffix}'
+)
diff --git a/readthedocs/projects/exceptions.py b/readthedocs/projects/exceptions.py
index 24a136d09dd..85b439400df 100644
--- a/readthedocs/projects/exceptions.py
+++ b/readthedocs/projects/exceptions.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
-"""Project exceptions."""
-from __future__ import division, print_function, unicode_literals
+"""Project exceptions."""
from django.conf import settings
from django.utils.translation import ugettext_noop as _
@@ -15,13 +14,13 @@ class ProjectConfigurationError(BuildEnvironmentError):
NOT_FOUND = _(
'A configuration file was not found. '
- 'Make sure you have a conf.py file in your repository.'
+ 'Make sure you have a conf.py file in your repository.',
)
MULTIPLE_CONF_FILES = _(
'We found more than one conf.py and are not sure which one to use. '
'Please, specify the correct file under the Advanced settings tab '
- "in the project's Admin."
+ "in the project's Admin.",
)
@@ -31,25 +30,21 @@ class RepositoryError(BuildEnvironmentError):
PRIVATE_ALLOWED = _(
'There was a problem connecting to your repository, '
- 'ensure that your repository URL is correct.'
+ 'ensure that your repository URL is correct.',
)
PRIVATE_NOT_ALLOWED = _(
'There was a problem connecting to your repository, '
'ensure that your repository URL is correct and your repository is public. '
- 'Private repositories are not supported.'
+ 'Private repositories are not supported.',
)
- INVALID_SUBMODULES = _(
- 'One or more submodule URLs are not valid: {}.'
- )
+ INVALID_SUBMODULES = _('One or more submodule URLs are not valid: {}.',)
DUPLICATED_RESERVED_VERSIONS = _(
- 'You can not have two versions with the name latest or stable.'
+ 'You can not have two versions with the name latest or stable.',
)
- FAILED_TO_CHECKOUT = _(
- 'Failed to checkout revision: {}'
- )
+ FAILED_TO_CHECKOUT = _('Failed to checkout revision: {}')
def get_default_message(self):
if settings.ALLOW_PRIVATE_REPOS:
@@ -65,5 +60,3 @@ class ProjectSpamError(Exception):
This error is not raised to users, we use this for banning users in the
background.
"""
-
- pass
diff --git a/readthedocs/projects/feeds.py b/readthedocs/projects/feeds.py
index b3739f4b005..35ebdd6ac32 100644
--- a/readthedocs/projects/feeds.py
+++ b/readthedocs/projects/feeds.py
@@ -1,6 +1,7 @@
-"""Project RSS feeds"""
+# -*- coding: utf-8 -*-
+
+"""Project RSS feeds."""
-from __future__ import absolute_import
from django.contrib.syndication.views import Feed
from readthedocs.projects.models import Project
@@ -8,11 +9,11 @@
class LatestProjectsFeed(Feed):
- """RSS feed for projects that were recently updated"""
+ """RSS feed for projects that were recently updated."""
- title = "Recently updated documentation"
- link = "http://readthedocs.org"
- description = "Recently updated documentation on Read the Docs"
+ title = 'Recently updated documentation'
+ link = 'http://readthedocs.org'
+ description = 'Recently updated documentation on Read the Docs'
def items(self):
return Project.objects.public().order_by('-modified_date')[:10]
@@ -26,11 +27,11 @@ def item_description(self, item):
class NewProjectsFeed(Feed):
- """RSS feed for newly created projects"""
+ """RSS feed for newly created projects."""
- title = "Newest documentation"
- link = "http://readthedocs.org"
- description = "Recently created documentation on Read the Docs"
+ title = 'Newest documentation'
+ link = 'http://readthedocs.org'
+ description = 'Recently created documentation on Read the Docs'
def items(self):
return Project.objects.public().order_by('-pk')[:10]
diff --git a/readthedocs/projects/fixtures/test_auth.json b/readthedocs/projects/fixtures/test_auth.json
index 83d7738406e..c0d160196f1 100644
--- a/readthedocs/projects/fixtures/test_auth.json
+++ b/readthedocs/projects/fixtures/test_auth.json
@@ -701,4 +701,4 @@
"date_joined": "2014-02-09T19:48:39.934+00:00"
}
}
-]
\ No newline at end of file
+]
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py
index 70fcc88d7e6..aae5e9d278c 100644
--- a/readthedocs/projects/forms.py
+++ b/readthedocs/projects/forms.py
@@ -2,35 +2,16 @@
"""Project forms."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
-try:
- # TODO: remove this when we deprecate Python2
- # re.fullmatch is >= Py3.4 only
- from re import fullmatch
-except ImportError:
- # https://stackoverflow.com/questions/30212413/backport-python-3-4s-regular-expression-fullmatch-to-python-2
- import re
-
- def fullmatch(regex, string, flags=0):
- """Emulate python-3.4 re.fullmatch().""" # noqa
- return re.match("(?:" + regex + r")\Z", string, flags=flags)
-
from random import choice
+from re import fullmatch
+from urllib.parse import urlparse
-from builtins import object
from django import forms
from django.conf import settings
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
-from future.backports.urllib.parse import urlparse
from guardian.shortcuts import assign
from textclassifier.validators import ClassifierValidator
@@ -65,17 +46,17 @@ class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
- super(ProjectForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def save(self, commit=True):
- project = super(ProjectForm, self).save(commit)
+ project = super().save(commit)
if commit:
if self.user and not project.users.filter(pk=self.user.pk).exists():
project.users.add(self.user)
return project
-class ProjectTriggerBuildMixin(object):
+class ProjectTriggerBuildMixin:
"""
Mixin to trigger build on form save.
@@ -86,7 +67,7 @@ class ProjectTriggerBuildMixin(object):
def save(self, commit=True):
"""Trigger build on commit save."""
- project = super(ProjectTriggerBuildMixin, self).save(commit)
+ project = super().save(commit)
if commit:
trigger_build(project=project)
return project
@@ -103,7 +84,7 @@ class ProjectBasicsForm(ProjectForm):
"""Form for basic project fields."""
- class Meta(object):
+ class Meta:
model = Project
fields = ('name', 'repo', 'repo_type')
@@ -114,7 +95,7 @@ class Meta(object):
def __init__(self, *args, **kwargs):
show_advanced = kwargs.pop('show_advanced', False)
- super(ProjectBasicsForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if show_advanced:
self.fields['advanced'] = forms.BooleanField(
required=False,
@@ -125,7 +106,7 @@ def __init__(self, *args, **kwargs):
def save(self, commit=True):
"""Add remote repository relationship to the project instance."""
- instance = super(ProjectBasicsForm, self).save(commit)
+ instance = super().save(commit)
remote_repo = self.cleaned_data.get('remote_repository', None)
if remote_repo:
if commit:
@@ -141,12 +122,11 @@ def clean_name(self):
potential_slug = slugify(name)
if Project.objects.filter(slug=potential_slug).exists():
raise forms.ValidationError(
- _('Invalid project name, a project already exists with that name')) # yapf: disable # noqa
+ _('Invalid project name, a project already exists with that name'),
+ ) # yapf: disable # noqa
if not potential_slug:
# Check the generated slug won't be empty
- raise forms.ValidationError(
- _('Invalid project name'),
- )
+ raise forms.ValidationError(_('Invalid project name'),)
return name
@@ -178,7 +158,7 @@ class ProjectExtraForm(ProjectForm):
"""Additional project information form."""
- class Meta(object):
+ class Meta:
model = Project
fields = (
'description',
@@ -200,7 +180,9 @@ def clean_tags(self):
for tag in tags:
if len(tag) > 100:
raise forms.ValidationError(
- _('Length of each tag must be less than or equal to 100 characters.')
+ _(
+ 'Length of each tag must be less than or equal to 100 characters.',
+ ),
)
return tags
@@ -212,11 +194,13 @@ class ProjectAdvancedForm(ProjectTriggerBuildMixin, ProjectForm):
python_interpreter = forms.ChoiceField(
choices=constants.PYTHON_CHOICES,
initial='python',
- help_text=_('The Python interpreter used to create the virtual '
- 'environment.'),
+ help_text=_(
+ 'The Python interpreter used to create the virtual '
+ 'environment.',
+ ),
)
- class Meta(object):
+ class Meta:
model = Project
fields = (
# Standard build edits
@@ -240,35 +224,43 @@ class Meta(object):
)
def __init__(self, *args, **kwargs):
- super(ProjectAdvancedForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
default_choice = (None, '-' * 9)
all_versions = self.instance.versions.values_list(
- 'identifier', 'verbose_name'
+ 'identifier',
+ 'verbose_name',
)
self.fields['default_branch'].widget = forms.Select(
- choices=[default_choice] + list(all_versions)
+ choices=[default_choice] + list(all_versions),
)
active_versions = self.instance.all_active_versions().values_list(
'slug', 'verbose_name'
- )
+        )  # yapf: disable
self.fields['default_version'].widget = forms.Select(
- choices=active_versions
+ choices=active_versions,
)
def clean_conf_py_file(self):
filename = self.cleaned_data.get('conf_py_file', '').strip()
if filename and 'conf.py' not in filename:
raise forms.ValidationError(
- _('Your configuration file is invalid, make sure it contains '
- 'conf.py in it.')) # yapf: disable
+ _(
+ 'Your configuration file is invalid, make sure it contains '
+ 'conf.py in it.',
+ ),
+ ) # yapf: disable
return filename
-class UpdateProjectForm(ProjectTriggerBuildMixin, ProjectBasicsForm,
- ProjectExtraForm):
- class Meta(object):
+class UpdateProjectForm(
+ ProjectTriggerBuildMixin,
+ ProjectBasicsForm,
+ ProjectExtraForm,
+):
+
+ class Meta:
model = Project
fields = (
# Basics
@@ -290,17 +282,17 @@ def clean_language(self):
if project:
msg = _(
'There is already a "{lang}" translation '
- 'for the {proj} project.'
+ 'for the {proj} project.',
)
if project.translations.filter(language=language).exists():
raise forms.ValidationError(
- msg.format(lang=language, proj=project.slug)
+ msg.format(lang=language, proj=project.slug),
)
main_project = project.main_language_project
if main_project:
if main_project.language == language:
raise forms.ValidationError(
- msg.format(lang=language, proj=main_project.slug)
+ msg.format(lang=language, proj=main_project.slug),
)
siblings = (
main_project.translations
@@ -310,7 +302,7 @@ def clean_language(self):
)
if siblings:
raise forms.ValidationError(
- msg.format(lang=language, proj=main_project.slug)
+ msg.format(lang=language, proj=main_project.slug),
)
return language
@@ -321,14 +313,14 @@ class ProjectRelationshipBaseForm(forms.ModelForm):
parent = forms.CharField(widget=forms.HiddenInput(), required=False)
- class Meta(object):
+ class Meta:
model = ProjectRelationship
fields = '__all__'
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project')
self.user = kwargs.pop('user')
- super(ProjectRelationshipBaseForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# Don't display the update form with an editable child, as it will be
# filtered out from the queryset anyways.
if hasattr(self, 'instance') and self.instance.pk is not None:
@@ -341,14 +333,16 @@ def clean_parent(self):
# This validation error is mostly for testing, users shouldn't see
# this in normal circumstances
raise forms.ValidationError(
- _('Subproject nesting is not supported'))
+ _('Subproject nesting is not supported'),
+ )
return self.project
def clean_child(self):
child = self.cleaned_data['child']
if child == self.project:
raise forms.ValidationError(
- _('A project can not be a subproject of itself'))
+ _('A project can not be a subproject of itself'),
+ )
return child
def get_subproject_queryset(self):
@@ -362,7 +356,8 @@ def get_subproject_queryset(self):
Project.objects.for_admin_user(self.user)
.exclude(subprojects__isnull=False)
.exclude(superprojects__isnull=False)
- .exclude(pk=self.project.pk))
+ .exclude(pk=self.project.pk)
+ )
return queryset
@@ -375,11 +370,11 @@ class DualCheckboxWidget(forms.CheckboxInput):
"""Checkbox with link to the version's built documentation."""
def __init__(self, version, attrs=None, check_test=bool):
- super(DualCheckboxWidget, self).__init__(attrs, check_test)
+ super().__init__(attrs, check_test)
self.version = version
def render(self, name, value, attrs=None, renderer=None):
- checkbox = super(DualCheckboxWidget, self).render(name, value, attrs, renderer)
+ checkbox = super().render(name, value, attrs, renderer)
icon = self.render_icon()
return mark_safe('{}{}'.format(checkbox, icon))
@@ -467,12 +462,14 @@ def build_versions_form(project):
class BaseUploadHTMLForm(forms.Form):
content = forms.FileField(label=_('Zip file of HTML'))
- overwrite = forms.BooleanField(required=False,
- label=_('Overwrite existing HTML?'))
+ overwrite = forms.BooleanField(
+ required=False,
+ label=_('Overwrite existing HTML?'),
+ )
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
- super(BaseUploadHTMLForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def clean(self):
version_slug = self.cleaned_data['version']
@@ -512,14 +509,15 @@ class UserForm(forms.Form):
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(UserForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def clean_user(self):
name = self.cleaned_data['user']
user_qs = User.objects.filter(username=name)
if not user_qs.exists():
raise forms.ValidationError(
- _('User {name} does not exist').format(name=name))
+ _('User {name} does not exist').format(name=name),
+ )
self.user = user_qs[0]
return name
@@ -538,11 +536,13 @@ class EmailHookForm(forms.Form):
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(EmailHookForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def clean_email(self):
self.email = EmailHook.objects.get_or_create(
- email=self.cleaned_data['email'], project=self.project)[0]
+ email=self.cleaned_data['email'],
+ project=self.project,
+ )[0]
return self.email
def save(self):
@@ -556,11 +556,13 @@ class WebHookForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(WebHookForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def save(self, commit=True):
self.webhook = WebHook.objects.get_or_create(
- url=self.cleaned_data['url'], project=self.project)[0]
+ url=self.cleaned_data['url'],
+ project=self.project,
+ )[0]
self.project.webhook_notifications.add(self.webhook)
return self.project
@@ -578,15 +580,17 @@ class TranslationBaseForm(forms.Form):
def __init__(self, *args, **kwargs):
self.parent = kwargs.pop('parent', None)
self.user = kwargs.pop('user')
- super(TranslationBaseForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.fields['project'].choices = self.get_choices()
def get_choices(self):
- return [
- (project.slug, '{project} ({lang})'.format(
- project=project.slug, lang=project.get_language_display()))
- for project in self.get_translation_queryset().all()
- ]
+ return [(
+ project.slug,
+ '{project} ({lang})'.format(
+ project=project.slug,
+ lang=project.get_language_display(),
+ ),
+ ) for project in self.get_translation_queryset().all()]
def clean_project(self):
translation_project_slug = self.cleaned_data['project']
@@ -595,36 +599,35 @@ def clean_project(self):
if self.parent.main_language_project is not None:
msg = 'Project "{project}" is already a translation'
raise forms.ValidationError(
- (_(msg).format(project=self.parent.slug))
+ (_(msg).format(project=self.parent.slug)),
)
project_translation_qs = self.get_translation_queryset().filter(
- slug=translation_project_slug
+ slug=translation_project_slug,
)
if not project_translation_qs.exists():
msg = 'Project "{project}" does not exist.'
raise forms.ValidationError(
- (_(msg).format(project=translation_project_slug))
+ (_(msg).format(project=translation_project_slug)),
)
self.translation = project_translation_qs.first()
if self.translation.language == self.parent.language:
- msg = (
- 'Both projects can not have the same language ({lang}).'
- )
+ msg = ('Both projects can not have the same language ({lang}).')
raise forms.ValidationError(
- _(msg).format(lang=self.parent.get_language_display())
+ _(msg).format(lang=self.parent.get_language_display()),
)
+
+ # yapf: disable
exists_translation = (
self.parent.translations
.filter(language=self.translation.language)
.exists()
)
+ # yapf: enable
if exists_translation:
- msg = (
- 'This project already has a translation for {lang}.'
- )
+ msg = ('This project already has a translation for {lang}.')
raise forms.ValidationError(
- _(msg).format(lang=self.translation.get_language_display())
+ _(msg).format(lang=self.translation.get_language_display()),
)
is_parent = self.translation.translations.exists()
if is_parent:
@@ -659,13 +662,13 @@ class RedirectForm(forms.ModelForm):
"""Form for project redirects."""
- class Meta(object):
+ class Meta:
model = Redirect
fields = ['redirect_type', 'from_url', 'to_url']
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(RedirectForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def save(self, **_): # pylint: disable=arguments-differ
# TODO this should respect the unused argument `commit`. It's not clear
@@ -686,13 +689,13 @@ class DomainBaseForm(forms.ModelForm):
project = forms.CharField(widget=forms.HiddenInput(), required=False)
- class Meta(object):
+ class Meta:
model = Domain
exclude = ['machine', 'cname', 'count'] # pylint: disable=modelform-uses-exclude
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(DomainBaseForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def clean_project(self):
return self.project
@@ -708,11 +711,10 @@ def clean_domain(self):
def clean_canonical(self):
canonical = self.cleaned_data['canonical']
_id = self.initial.get('id')
- if canonical and Domain.objects.filter(
- project=self.project, canonical=True
- ).exclude(pk=_id).exists():
+        if canonical and Domain.objects.filter(project=self.project, canonical=True).exclude(pk=_id).exists():  # yapf: disable  # noqa
raise forms.ValidationError(
- _('Only 1 Domain can be canonical at a time.'))
+ _('Only 1 Domain can be canonical at a time.'),
+ )
return canonical
@@ -730,13 +732,13 @@ class IntegrationForm(forms.ModelForm):
project = forms.CharField(widget=forms.HiddenInput(), required=False)
- class Meta(object):
+ class Meta:
model = Integration
exclude = ['provider_data', 'exchanges'] # pylint: disable=modelform-uses-exclude
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(IntegrationForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# Alter the integration type choices to only provider webhooks
self.fields['integration_type'].choices = Integration.WEBHOOK_INTEGRATIONS # yapf: disable # noqa
@@ -745,20 +747,20 @@ def clean_project(self):
def save(self, commit=True):
self.instance = Integration.objects.subclass(self.instance)
- return super(IntegrationForm, self).save(commit)
+ return super().save(commit)
class ProjectAdvertisingForm(forms.ModelForm):
"""Project promotion opt-out form."""
- class Meta(object):
+ class Meta:
model = Project
fields = ['allow_promos']
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(ProjectAdvertisingForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class FeatureForm(forms.ModelForm):
@@ -773,12 +775,12 @@ class FeatureForm(forms.ModelForm):
feature_id = forms.ChoiceField()
- class Meta(object):
+ class Meta:
model = Feature
fields = ['projects', 'feature_id', 'default_true']
def __init__(self, *args, **kwargs):
- super(FeatureForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.fields['feature_id'].choices = Feature.FEATURES
@@ -792,13 +794,13 @@ class EnvironmentVariableForm(forms.ModelForm):
project = forms.CharField(widget=forms.HiddenInput(), required=False)
- class Meta(object):
+ class Meta:
model = EnvironmentVariable
fields = ('name', 'value', 'project')
def __init__(self, *args, **kwargs):
self.project = kwargs.pop('project', None)
- super(EnvironmentVariableForm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def clean_project(self):
return self.project
@@ -815,7 +817,9 @@ def clean_name(self):
)
elif self.project.environmentvariable_set.filter(name=name).exists():
raise forms.ValidationError(
- _('There is already a variable with this name for this project'),
+ _(
+ 'There is already a variable with this name for this project',
+ ),
)
elif ' ' in name:
raise forms.ValidationError(
diff --git a/readthedocs/projects/management/commands/import_project_from_live.py b/readthedocs/projects/management/commands/import_project_from_live.py
index 95c85778349..2b006f84ecb 100644
--- a/readthedocs/projects/management/commands/import_project_from_live.py
+++ b/readthedocs/projects/management/commands/import_project_from_live.py
@@ -1,19 +1,21 @@
-"""Import project command"""
+# -*- coding: utf-8 -*-
+
+"""Import project command."""
-from __future__ import absolute_import
-from django.core.management import call_command
-from django.core.management.base import BaseCommand
import json
-import slumber
+import slumber
from django.contrib.auth.models import User
+from django.core.management import call_command
+from django.core.management.base import BaseCommand
+
from ...models import Project
class Command(BaseCommand):
"""
- Import project from production API
+ Import project from production API.
This is a helper to debug issues with projects on the server more easily
locally. It allows you to import projects based on the data that the public
@@ -22,8 +24,8 @@ class Command(BaseCommand):
help = (
"Retrieves the data of a project from readthedocs.org's API and puts "
- "it into the local database. This is mostly useful for debugging "
- "issues with projects on the live site."
+ 'it into the local database. This is mostly useful for debugging '
+ 'issues with projects on the live site.'
)
def add_arguments(self, parser):
@@ -41,10 +43,11 @@ def handle(self, *args, **options):
project_data = project_data['objects'][0]
except (KeyError, IndexError):
self.stderr.write(
- 'Cannot find {slug} in API. Response was:\n{response}'
- .format(
+ 'Cannot find {slug} in API. Response was:\n{response}'.format(
slug=slug,
- response=json.dumps(project_data)))
+ response=json.dumps(project_data),
+ ),
+ )
try:
project = Project.objects.get(slug=slug)
diff --git a/readthedocs/projects/migrations/0001_initial.py b/readthedocs/projects/migrations/0001_initial.py
index 734358ad0b0..00d2a7915b0 100644
--- a/readthedocs/projects/migrations/0001_initial.py
+++ b/readthedocs/projects/migrations/0001_initial.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
-from django.conf import settings
import taggit.managers
+from django.conf import settings
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0002_add_importedfile_model.py b/readthedocs/projects/migrations/0002_add_importedfile_model.py
index a03fff529cb..cfa6f3b9e63 100644
--- a/readthedocs/projects/migrations/0002_add_importedfile_model.py
+++ b/readthedocs/projects/migrations/0002_add_importedfile_model.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0003_project_cdn_enabled.py b/readthedocs/projects/migrations/0003_project_cdn_enabled.py
index 471df331910..e89cfed99ac 100644
--- a/readthedocs/projects/migrations/0003_project_cdn_enabled.py
+++ b/readthedocs/projects/migrations/0003_project_cdn_enabled.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0004_add_project_container_image.py b/readthedocs/projects/migrations/0004_add_project_container_image.py
index 70c969d1be5..724e62e45fc 100644
--- a/readthedocs/projects/migrations/0004_add_project_container_image.py
+++ b/readthedocs/projects/migrations/0004_add_project_container_image.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0005_sync_project_model.py b/readthedocs/projects/migrations/0005_sync_project_model.py
index 75b9e6d5e06..12537572ce8 100644
--- a/readthedocs/projects/migrations/0005_sync_project_model.py
+++ b/readthedocs/projects/migrations/0005_sync_project_model.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0006_add_domain_models.py b/readthedocs/projects/migrations/0006_add_domain_models.py
index 78e05b81e28..e50617a6931 100644
--- a/readthedocs/projects/migrations/0006_add_domain_models.py
+++ b/readthedocs/projects/migrations/0006_add_domain_models.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0007_migrate_canonical_data.py b/readthedocs/projects/migrations/0007_migrate_canonical_data.py
index 743d6a145cb..633f975fc5c 100644
--- a/readthedocs/projects/migrations/0007_migrate_canonical_data.py
+++ b/readthedocs/projects/migrations/0007_migrate_canonical_data.py
@@ -1,13 +1,9 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals, print_function
-
-from __future__ import absolute_import
-from django.db import migrations
-from django.db import transaction
+from django.db import migrations, transaction
def migrate_canonical(apps, schema_editor):
- Project = apps.get_model("projects", "Project")
+ Project = apps.get_model('projects', 'Project')
for project in Project.objects.all():
if project.canonical_url:
try:
@@ -16,11 +12,11 @@ def migrate_canonical(apps, schema_editor):
url=project.canonical_url,
canonical=True,
)
- print(u"Added {url} to {project}".format(url=project.canonical_url, project=project.name))
+ print('Added {url} to {project}'.format(url=project.canonical_url, project=project.name))
except Exception as e:
print(e)
- print(u"Failed adding {url} to {project}".format(
- url=project.canonical_url, project=project.name
+ print('Failed adding {url} to {project}'.format(
+ url=project.canonical_url, project=project.name,
))
@@ -31,5 +27,5 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunPython(migrate_canonical)
+ migrations.RunPython(migrate_canonical),
]
diff --git a/readthedocs/projects/migrations/0008_add_subproject_alias_prefix.py b/readthedocs/projects/migrations/0008_add_subproject_alias_prefix.py
index b3eb933882c..ec61ecabd15 100644
--- a/readthedocs/projects/migrations/0008_add_subproject_alias_prefix.py
+++ b/readthedocs/projects/migrations/0008_add_subproject_alias_prefix.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0009_add_domain_field.py b/readthedocs/projects/migrations/0009_add_domain_field.py
index 20230a4738c..4c910c74e44 100644
--- a/readthedocs/projects/migrations/0009_add_domain_field.py
+++ b/readthedocs/projects/migrations/0009_add_domain_field.py
@@ -1,11 +1,7 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from django.db import models, migrations
import django.contrib.sites.models
-
-import uuid
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0010_migrate_domain_data.py b/readthedocs/projects/migrations/0010_migrate_domain_data.py
index ef60b2a00d1..49d3dc07bb9 100644
--- a/readthedocs/projects/migrations/0010_migrate_domain_data.py
+++ b/readthedocs/projects/migrations/0010_migrate_domain_data.py
@@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
-from __future__ import (absolute_import, print_function, unicode_literals)
+from urllib.parse import urlparse
-from django.db import models, migrations
-from future.backports.urllib.parse import urlparse
+from django.db import migrations, models
import readthedocs.projects.validators
def migrate_url(apps, schema_editor):
- Domain = apps.get_model("projects", "Domain")
+ Domain = apps.get_model('projects', 'Domain')
Domain.objects.filter(count=0).delete()
for domain in Domain.objects.all():
if domain.project.superprojects.count() or domain.project.main_language_project:
- print("{project} is a subproject or translation. Deleting domain.".format(
- project=domain.project.slug))
+ print('{project} is a subproject or translation. Deleting domain.'.format(
+ project=domain.project.slug,
+ ))
domain.delete()
continue
parsed = urlparse(domain.url)
@@ -24,10 +24,10 @@ def migrate_url(apps, schema_editor):
try:
domain.domain = domain_string
domain.save()
- print(u"Added {domain} from {url}".format(url=domain.url, domain=domain_string))
+ print('Added {domain} from {url}'.format(url=domain.url, domain=domain_string))
except Exception as e:
print(e)
- print(u"Failed {domain} from {url}".format(url=domain.url, domain=domain_string))
+ print('Failed {domain} from {url}'.format(url=domain.url, domain=domain_string))
dms = Domain.objects.filter(domain=domain_string).order_by('-count')
if dms.count() > 1:
diff --git a/readthedocs/projects/migrations/0011_delete-url.py b/readthedocs/projects/migrations/0011_delete-url.py
index fcd83c02753..3b01ed32cf9 100644
--- a/readthedocs/projects/migrations/0011_delete-url.py
+++ b/readthedocs/projects/migrations/0011_delete-url.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0012_proper-name-for-install-project.py b/readthedocs/projects/migrations/0012_proper-name-for-install-project.py
index 8f5f116269b..9f143977ca0 100644
--- a/readthedocs/projects/migrations/0012_proper-name-for-install-project.py
+++ b/readthedocs/projects/migrations/0012_proper-name-for-install-project.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0013_add-container-limits.py b/readthedocs/projects/migrations/0013_add-container-limits.py
index c2820609037..9a052e00fc1 100644
--- a/readthedocs/projects/migrations/0013_add-container-limits.py
+++ b/readthedocs/projects/migrations/0013_add-container-limits.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0014_add-state-tracking.py b/readthedocs/projects/migrations/0014_add-state-tracking.py
index 628bf970dce..d2c34c28e5a 100644
--- a/readthedocs/projects/migrations/0014_add-state-tracking.py
+++ b/readthedocs/projects/migrations/0014_add-state-tracking.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0015_add_project_allow_promos.py b/readthedocs/projects/migrations/0015_add_project_allow_promos.py
index 5c50eeac924..882893160fc 100644
--- a/readthedocs/projects/migrations/0015_add_project_allow_promos.py
+++ b/readthedocs/projects/migrations/0015_add_project_allow_promos.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0016_build-queue-name.py b/readthedocs/projects/migrations/0016_build-queue-name.py
index 46833ada78c..0350bae8690 100644
--- a/readthedocs/projects/migrations/0016_build-queue-name.py
+++ b/readthedocs/projects/migrations/0016_build-queue-name.py
@@ -1,17 +1,15 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations
def update_build_queue(apps, schema):
- """Update project build queue to include the previously implied build- prefix"""
- Project = apps.get_model("projects", "Project")
+ """Update project build queue to include the previously implied build-
+ prefix."""
+ Project = apps.get_model('projects', 'Project')
for project in Project.objects.all():
if project.build_queue is not None:
if not project.build_queue.startswith('build-'):
- project.build_queue = 'build-{0}'.format(project.build_queue)
+ project.build_queue = 'build-{}'.format(project.build_queue)
project.save()
diff --git a/readthedocs/projects/migrations/0017_add_domain_https.py b/readthedocs/projects/migrations/0017_add_domain_https.py
index 9bf94eeb5cb..18788581ccf 100644
--- a/readthedocs/projects/migrations/0017_add_domain_https.py
+++ b/readthedocs/projects/migrations/0017_add_domain_https.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0018_fix-translation-model.py b/readthedocs/projects/migrations/0018_fix-translation-model.py
index bfe283d27cb..2541fb0d36b 100644
--- a/readthedocs/projects/migrations/0018_fix-translation-model.py
+++ b/readthedocs/projects/migrations/0018_fix-translation-model.py
@@ -1,9 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0019_add-features.py b/readthedocs/projects/migrations/0019_add-features.py
index 6d1036dd123..6b7ee7a8bcd 100644
--- a/readthedocs/projects/migrations/0019_add-features.py
+++ b/readthedocs/projects/migrations/0019_add-features.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-27 12:55
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0020_add-api-project-proxy.py b/readthedocs/projects/migrations/0020_add-api-project-proxy.py
index 34eafa4846f..0040581f77b 100644
--- a/readthedocs/projects/migrations/0020_add-api-project-proxy.py
+++ b/readthedocs/projects/migrations/0020_add-api-project-proxy.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-10-27 12:56
-from __future__ import unicode_literals
-
-from django.db import migrations, models
+from django.db import migrations
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py b/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
index 84dc7cf923b..91f27b6d001 100644
--- a/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
+++ b/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""Add feature for allowing access to deprecated webhook endpoints"""
-
-from __future__ import unicode_literals
+"""Add feature for allowing access to deprecated webhook endpoints."""
from django.db import migrations
@@ -30,5 +28,5 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunPython(forward_add_feature, reverse_add_feature)
+ migrations.RunPython(forward_add_feature, reverse_add_feature),
]
diff --git a/readthedocs/projects/migrations/0022_add-alias-slug.py b/readthedocs/projects/migrations/0022_add-alias-slug.py
index 8439c56e85e..90c434a2752 100644
--- a/readthedocs/projects/migrations/0022_add-alias-slug.py
+++ b/readthedocs/projects/migrations/0022_add-alias-slug.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-12-21 16:30
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0023_migrate-alias-slug.py b/readthedocs/projects/migrations/0023_migrate-alias-slug.py
index 4942848b952..531c3dc332f 100644
--- a/readthedocs/projects/migrations/0023_migrate-alias-slug.py
+++ b/readthedocs/projects/migrations/0023_migrate-alias-slug.py
@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-12-21 16:31
-from __future__ import unicode_literals
+import re
from django.db import migrations
-import re
-
class Migration(migrations.Migration):
@@ -14,7 +12,7 @@ def migrate_data(apps, schema_editor):
# so that we don't break a bunch of folks URL's.
# They will have to change them on update.
invalid_chars_re = re.compile('[^-._a-zA-Z0-9]')
- ProjectRelationship = apps.get_model("projects", "ProjectRelationship")
+ ProjectRelationship = apps.get_model('projects', 'ProjectRelationship')
for p in ProjectRelationship.objects.all():
if p.alias and invalid_chars_re.match(p.alias):
new_alias = invalid_chars_re.sub('', p.alias)
@@ -29,5 +27,5 @@ def reverse(apps, schema_editor):
]
operations = [
- migrations.RunPython(migrate_data, reverse)
+ migrations.RunPython(migrate_data, reverse),
]
diff --git a/readthedocs/projects/migrations/0024_add-show-version-warning.py b/readthedocs/projects/migrations/0024_add-show-version-warning.py
index 6bc60e4aeeb..bfa0b2edb9e 100644
--- a/readthedocs/projects/migrations/0024_add-show-version-warning.py
+++ b/readthedocs/projects/migrations/0024_add-show-version-warning.py
@@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-02 01:27
-from __future__ import unicode_literals
-
from django.db import migrations, models
+
import readthedocs.projects.validators
diff --git a/readthedocs/projects/migrations/0025_show-version-warning-existing-projects.py b/readthedocs/projects/migrations/0025_show-version-warning-existing-projects.py
index 5d073258caf..e38349fc52f 100644
--- a/readthedocs/projects/migrations/0025_show-version-warning-existing-projects.py
+++ b/readthedocs/projects/migrations/0025_show-version-warning-existing-projects.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 19:25
-from __future__ import unicode_literals
-
from django.db import migrations
diff --git a/readthedocs/projects/migrations/0026_ad-free-option.py b/readthedocs/projects/migrations/0026_ad-free-option.py
index a32fe5e74b7..d108f25e190 100644
--- a/readthedocs/projects/migrations/0026_ad-free-option.py
+++ b/readthedocs/projects/migrations/0026_ad-free-option.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-06-29 15:53
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0027_remove_json_with_html_feature.py b/readthedocs/projects/migrations/0027_remove_json_with_html_feature.py
index c5daf0ece54..d0fb5d14e93 100644
--- a/readthedocs/projects/migrations/0027_remove_json_with_html_feature.py
+++ b/readthedocs/projects/migrations/0027_remove_json_with_html_feature.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-08-22 09:19
-from __future__ import unicode_literals
-
from django.db import migrations
diff --git a/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py b/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py
index 056e9b8e61a..4d4fc04a5a8 100644
--- a/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py
+++ b/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-31 10:08
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0029_add_additional_languages.py b/readthedocs/projects/migrations/0029_add_additional_languages.py
index b4cfc77535b..8e9e48d4b6f 100644
--- a/readthedocs/projects/migrations/0029_add_additional_languages.py
+++ b/readthedocs/projects/migrations/0029_add_additional_languages.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-01 13:38
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0030_change-max-length-project-slug.py b/readthedocs/projects/migrations/0030_change-max-length-project-slug.py
index 7e9b48da270..ee27e9602a9 100644
--- a/readthedocs/projects/migrations/0030_change-max-length-project-slug.py
+++ b/readthedocs/projects/migrations/0030_change-max-length-project-slug.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-01 20:55
-from __future__ import unicode_literals
-
from django.db import migrations, models
from django.db.models.functions import Length
diff --git a/readthedocs/projects/migrations/0031_add_modified_date_importedfile.py b/readthedocs/projects/migrations/0031_add_modified_date_importedfile.py
index 255da1c003a..617a420c2a3 100644
--- a/readthedocs/projects/migrations/0031_add_modified_date_importedfile.py
+++ b/readthedocs/projects/migrations/0031_add_modified_date_importedfile.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-01 14:37
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0032_increase_webhook_maxsize.py b/readthedocs/projects/migrations/0032_increase_webhook_maxsize.py
index eed6d3de06a..49b231590ed 100644
--- a/readthedocs/projects/migrations/0032_increase_webhook_maxsize.py
+++ b/readthedocs/projects/migrations/0032_increase_webhook_maxsize.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-06 23:12
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0033_add_environment_variables.py b/readthedocs/projects/migrations/0033_add_environment_variables.py
index de9e3d18e5b..9279fa8b338 100644
--- a/readthedocs/projects/migrations/0033_add_environment_variables.py
+++ b/readthedocs/projects/migrations/0033_add_environment_variables.py
@@ -1,10 +1,8 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-12 13:57
-from __future__ import unicode_literals
-
-from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/projects/migrations/0034_remove_unused_project_model_fields.py b/readthedocs/projects/migrations/0034_remove_unused_project_model_fields.py
index 996cdd9c6d3..4ad8b27f8a5 100644
--- a/readthedocs/projects/migrations/0034_remove_unused_project_model_fields.py
+++ b/readthedocs/projects/migrations/0034_remove_unused_project_model_fields.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-05 12:20
-from __future__ import unicode_literals
-
from django.db import migrations
diff --git a/readthedocs/projects/migrations/0035_container_time_limit_as_integer.py b/readthedocs/projects/migrations/0035_container_time_limit_as_integer.py
index 28dab124004..7256548fd6f 100644
--- a/readthedocs/projects/migrations/0035_container_time_limit_as_integer.py
+++ b/readthedocs/projects/migrations/0035_container_time_limit_as_integer.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-12-10 11:19
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/migrations/0036_remove-auto-doctype.py b/readthedocs/projects/migrations/0036_remove-auto-doctype.py
index 8688fc4d07c..b0f9ad28165 100644
--- a/readthedocs/projects/migrations/0036_remove-auto-doctype.py
+++ b/readthedocs/projects/migrations/0036_remove-auto-doctype.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-12-17 17:32
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py
index f4fdea8f0ba..f39a54c4625 100644
--- a/readthedocs/projects/models.py
+++ b/readthedocs/projects/models.py
@@ -1,24 +1,21 @@
# -*- coding: utf-8 -*-
-"""Project models."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Project models."""
import fnmatch
import logging
import os
-from builtins import object # pylint: disable=redefined-builtin
-from six.moves import shlex_quote
+from urllib.parse import urlparse
from django.conf import settings
from django.contrib.auth.models import User
-from django.urls import NoReverseMatch, reverse
from django.db import models
+from django.urls import NoReverseMatch, reverse
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django_extensions.db.models import TimeStampedModel
-from future.backports.urllib.parse import urlparse # noqa
from guardian.shortcuts import assign
+from six.moves import shlex_quote
from taggit.managers import TaggableManager
from readthedocs.builds.constants import LATEST, STABLE
@@ -27,15 +24,22 @@
from readthedocs.projects import constants
from readthedocs.projects.exceptions import ProjectConfigurationError
from readthedocs.projects.querysets import (
- ChildRelatedProjectQuerySet, FeatureQuerySet, ProjectQuerySet,
- RelatedProjectQuerySet)
+ ChildRelatedProjectQuerySet,
+ FeatureQuerySet,
+ ProjectQuerySet,
+ RelatedProjectQuerySet,
+)
from readthedocs.projects.templatetags.projects_tags import sort_version_aware
-from readthedocs.projects.validators import validate_domain_name, validate_repository_url
+from readthedocs.projects.validators import (
+ validate_domain_name,
+ validate_repository_url,
+)
from readthedocs.projects.version_handling import determine_stable_version
from readthedocs.restapi.client import api
from readthedocs.vcs_support.backends import backend_cls
from readthedocs.vcs_support.utils import Lock, NonBlockingLock
+
log = logging.getLogger(__name__)
@@ -48,21 +52,33 @@ class ProjectRelationship(models.Model):
This is used for subprojects
"""
- parent = models.ForeignKey('Project', verbose_name=_('Parent'),
- related_name='subprojects')
- child = models.ForeignKey('Project', verbose_name=_('Child'),
- related_name='superprojects')
- alias = models.SlugField(_('Alias'), max_length=255, null=True, blank=True, db_index=False)
+ parent = models.ForeignKey(
+ 'Project',
+ verbose_name=_('Parent'),
+ related_name='subprojects',
+ )
+ child = models.ForeignKey(
+ 'Project',
+ verbose_name=_('Child'),
+ related_name='superprojects',
+ )
+ alias = models.SlugField(
+ _('Alias'),
+ max_length=255,
+ null=True,
+ blank=True,
+ db_index=False,
+ )
objects = ChildRelatedProjectQuerySet.as_manager()
def __str__(self):
- return '%s -> %s' % (self.parent, self.child)
+ return '{} -> {}'.format(self.parent, self.child)
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
if not self.alias:
self.alias = self.child.slug
- super(ProjectRelationship, self).save(*args, **kwargs)
+ super().save(*args, **kwargs)
# HACK
def get_absolute_url(self):
@@ -79,113 +95,202 @@ class Project(models.Model):
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
# Generally from conf.py
- users = models.ManyToManyField(User, verbose_name=_('User'),
- related_name='projects')
+ users = models.ManyToManyField(
+ User,
+ verbose_name=_('User'),
+ related_name='projects',
+ )
# A DNS label can contain up to 63 characters.
name = models.CharField(_('Name'), max_length=63)
slug = models.SlugField(_('Slug'), max_length=63, unique=True)
- description = models.TextField(_('Description'), blank=True,
- help_text=_('The reStructuredText '
- 'description of the project'))
- repo = models.CharField(_('Repository URL'), max_length=255,
- validators=[validate_repository_url],
- help_text=_('Hosted documentation repository URL'))
- repo_type = models.CharField(_('Repository type'), max_length=10,
- choices=constants.REPO_CHOICES, default='git')
- project_url = models.URLField(_('Project homepage'), blank=True,
- help_text=_('The project\'s homepage'))
- canonical_url = models.URLField(_('Canonical URL'), blank=True,
- help_text=_('URL that documentation is expected to serve from'))
+ description = models.TextField(
+ _('Description'),
+ blank=True,
+ help_text=_(
+ 'The reStructuredText '
+ 'description of the project',
+ ),
+ )
+ repo = models.CharField(
+ _('Repository URL'),
+ max_length=255,
+ validators=[validate_repository_url],
+ help_text=_('Hosted documentation repository URL'),
+ )
+ repo_type = models.CharField(
+ _('Repository type'),
+ max_length=10,
+ choices=constants.REPO_CHOICES,
+ default='git',
+ )
+ project_url = models.URLField(
+ _('Project homepage'),
+ blank=True,
+ help_text=_('The project\'s homepage'),
+ )
+ canonical_url = models.URLField(
+ _('Canonical URL'),
+ blank=True,
+ help_text=_('URL that documentation is expected to serve from'),
+ )
single_version = models.BooleanField(
- _('Single version'), default=False,
- help_text=_('A single version site has no translations and only your '
- '"latest" version, served at the root of the domain. Use '
- 'this with caution, only turn it on if you will never '
- 'have multiple versions of your docs.'))
+ _('Single version'),
+ default=False,
+ help_text=_(
+ 'A single version site has no translations and only your '
+ '"latest" version, served at the root of the domain. Use '
+ 'this with caution, only turn it on if you will never '
+ 'have multiple versions of your docs.',
+ ),
+ )
default_version = models.CharField(
- _('Default version'), max_length=255, default=LATEST,
- help_text=_('The version of your project that / redirects to'))
+ _('Default version'),
+ max_length=255,
+ default=LATEST,
+ help_text=_('The version of your project that / redirects to'),
+ )
# In default_branch, None means the backend should choose the
# appropriate branch. Eg 'master' for git
default_branch = models.CharField(
- _('Default branch'), max_length=255, default=None, null=True,
- blank=True, help_text=_('What branch "latest" points to. Leave empty '
- 'to use the default value for your VCS (eg. '
- '<code>trunk</code> or <code>master</code>).'))
+ _('Default branch'),
+ max_length=255,
+ default=None,
+ null=True,
+ blank=True,
+ help_text=_(
+ 'What branch "latest" points to. Leave empty '
+ 'to use the default value for your VCS (eg. '
+ '<code>trunk</code> or <code>master</code>).',
+ ),
+ )
requirements_file = models.CharField(
- _('Requirements file'), max_length=255, default=None, null=True,
- blank=True, help_text=_(
+ _('Requirements file'),
+ max_length=255,
+ default=None,
+ null=True,
+ blank=True,
+ help_text=_(
'A '
'pip requirements file needed to build your documentation. '
- 'Path from the root of your project.'))
+ 'Path from the root of your project.',
+ ),
+ )
documentation_type = models.CharField(
- _('Documentation type'), max_length=20,
- choices=constants.DOCUMENTATION_CHOICES, default='sphinx',
- help_text=_('Type of documentation you are building. More info.'))
+ _('Documentation type'),
+ max_length=20,
+ choices=constants.DOCUMENTATION_CHOICES,
+ default='sphinx',
+ help_text=_(
+ 'Type of documentation you are building. More info.',
+ ),
+ )
# Project features
cdn_enabled = models.BooleanField(_('CDN Enabled'), default=False)
analytics_code = models.CharField(
- _('Analytics code'), max_length=50, null=True, blank=True,
- help_text=_('Google Analytics Tracking ID '
- '(ex. <code>UA-22345342-1</code>). '
- 'This may slow down your page loads.'))
+ _('Analytics code'),
+ max_length=50,
+ null=True,
+ blank=True,
+ help_text=_(
+ 'Google Analytics Tracking ID '
+ '(ex. <code>UA-22345342-1</code>). '
+ 'This may slow down your page loads.',
+ ),
+ )
container_image = models.CharField(
- _('Alternative container image'), max_length=64, null=True, blank=True)
+ _('Alternative container image'),
+ max_length=64,
+ null=True,
+ blank=True,
+ )
container_mem_limit = models.CharField(
- _('Container memory limit'), max_length=10, null=True, blank=True,
- help_text=_('Memory limit in Docker format '
- '-- example: <code>512m</code> or <code>1g</code>'))
+ _('Container memory limit'),
+ max_length=10,
+ null=True,
+ blank=True,
+ help_text=_(
+ 'Memory limit in Docker format '
+ '-- example: <code>512m</code> or <code>1g</code>',
+ ),
+ )
container_time_limit = models.IntegerField(
_('Container time limit in seconds'),
null=True,
blank=True,
)
build_queue = models.CharField(
- _('Alternate build queue id'), max_length=32, null=True, blank=True)
+ _('Alternate build queue id'),
+ max_length=32,
+ null=True,
+ blank=True,
+ )
allow_promos = models.BooleanField(
- _('Allow paid advertising'), default=True, help_text=_(
- 'If unchecked, users will still see community ads.'))
+ _('Allow paid advertising'),
+ default=True,
+ help_text=_('If unchecked, users will still see community ads.'),
+ )
ad_free = models.BooleanField(
_('Ad-free'),
default=False,
help_text='If checked, do not show advertising for this project',
)
show_version_warning = models.BooleanField(
- _('Show version warning'), default=False,
- help_text=_('Show warning banner in non-stable nor latest versions.')
+ _('Show version warning'),
+ default=False,
+ help_text=_('Show warning banner in non-stable nor latest versions.'),
)
# Sphinx specific build options.
enable_epub_build = models.BooleanField(
- _('Enable EPUB build'), default=True,
+ _('Enable EPUB build'),
+ default=True,
help_text=_(
- 'Create a EPUB version of your documentation with each build.'))
+ 'Create a EPUB version of your documentation with each build.',
+ ),
+ )
enable_pdf_build = models.BooleanField(
- _('Enable PDF build'), default=True,
+ _('Enable PDF build'),
+ default=True,
help_text=_(
- 'Create a PDF version of your documentation with each build.'))
+ 'Create a PDF version of your documentation with each build.',
+ ),
+ )
# Other model data.
- path = models.CharField(_('Path'), max_length=255, editable=False,
- help_text=_('The directory where '
- '<code>conf.py</code> lives'))
+ path = models.CharField(
+ _('Path'),
+ max_length=255,
+ editable=False,
+ help_text=_(
+ 'The directory where '
+ '<code>conf.py</code> lives',
+ ),
+ )
conf_py_file = models.CharField(
- _('Python configuration file'), max_length=255, default='', blank=True,
- help_text=_('Path from project root to <code>conf.py</code> file '
- '(ex. <code>docs/conf.py</code>). '
- 'Leave blank if you want us to find it for you.'))
+ _('Python configuration file'),
+ max_length=255,
+ default='',
+ blank=True,
+ help_text=_(
+ 'Path from project root to <code>conf.py</code> file '
+ '(ex. <code>docs/conf.py</code>). '
+ 'Leave blank if you want us to find it for you.',
+ ),
+ )
featured = models.BooleanField(_('Featured'), default=False)
skip = models.BooleanField(_('Skip'), default=False)
install_project = models.BooleanField(
_('Install Project'),
- help_text=_('Install your project inside a virtualenv using <code>setup.py '
- 'install</code>'),
- default=False
+ help_text=_(
+ 'Install your project inside a virtualenv using <code>setup.py '
+ 'install</code>',
+ ),
+ default=False,
)
# This model attribute holds the python interpreter used to create the
@@ -195,64 +300,104 @@ class Project(models.Model):
max_length=20,
choices=constants.PYTHON_CHOICES,
default='python',
- help_text=_('The Python interpreter used to create the virtual '
- 'environment.'))
+ help_text=_(
+ 'The Python interpreter used to create the virtual '
+ 'environment.',
+ ),
+ )
use_system_packages = models.BooleanField(
_('Use system packages'),
- help_text=_('Give the virtual environment access to the global '
- 'site-packages dir.'),
- default=False
+ help_text=_(
+ 'Give the virtual environment access to the global '
+ 'site-packages dir.',
+ ),
+ default=False,
)
privacy_level = models.CharField(
- _('Privacy Level'), max_length=20, choices=constants.PRIVACY_CHOICES,
- default=getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public'),
- help_text=_('Level of privacy that you want on the repository. '
- 'Protected means public but not in listings.'))
+ _('Privacy Level'),
+ max_length=20,
+ choices=constants.PRIVACY_CHOICES,
+ default=getattr(
+ settings,
+ 'DEFAULT_PRIVACY_LEVEL',
+ 'public',
+ ),
+ help_text=_(
+ 'Level of privacy that you want on the repository. '
+ 'Protected means public but not in listings.',
+ ),
+ )
version_privacy_level = models.CharField(
- _('Version Privacy Level'), max_length=20,
- choices=constants.PRIVACY_CHOICES, default=getattr(
- settings, 'DEFAULT_PRIVACY_LEVEL', 'public'),
- help_text=_('Default level of privacy you want on built '
- 'versions of documentation.'))
+ _('Version Privacy Level'),
+ max_length=20,
+ choices=constants.PRIVACY_CHOICES,
+ default=getattr(
+ settings,
+ 'DEFAULT_PRIVACY_LEVEL',
+ 'public',
+ ),
+ help_text=_(
+ 'Default level of privacy you want on built '
+ 'versions of documentation.',
+ ),
+ )
# Subprojects
related_projects = models.ManyToManyField(
- 'self', verbose_name=_('Related projects'), blank=True,
- symmetrical=False, through=ProjectRelationship)
+ 'self',
+ verbose_name=_('Related projects'),
+ blank=True,
+ symmetrical=False,
+ through=ProjectRelationship,
+ )
# Language bits
- language = models.CharField(_('Language'), max_length=20, default='en',
- help_text=_('The language the project '
- 'documentation is rendered in. '
- "Note: this affects your project's URL."),
- choices=constants.LANGUAGES)
+ language = models.CharField(
+ _('Language'),
+ max_length=20,
+ default='en',
+ help_text=_(
+ 'The language the project '
+ 'documentation is rendered in. '
+ "Note: this affects your project's URL.",
+ ),
+ choices=constants.LANGUAGES,
+ )
programming_language = models.CharField(
_('Programming Language'),
max_length=20,
default='words',
help_text=_(
- 'The primary programming language the project is written in.'),
- choices=constants.PROGRAMMING_LANGUAGES, blank=True)
+ 'The primary programming language the project is written in.',
+ ),
+ choices=constants.PROGRAMMING_LANGUAGES,
+ blank=True,
+ )
# A subproject pointed at its main language, so it can be tracked
- main_language_project = models.ForeignKey('self',
- related_name='translations',
- on_delete=models.SET_NULL,
- blank=True, null=True)
+ main_language_project = models.ForeignKey(
+ 'self',
+ related_name='translations',
+ on_delete=models.SET_NULL,
+ blank=True,
+ null=True,
+ )
has_valid_webhook = models.BooleanField(
- default=False, help_text=_('This project has been built with a webhook')
+ default=False,
+ help_text=_('This project has been built with a webhook'),
)
has_valid_clone = models.BooleanField(
- default=False, help_text=_('This project has been successfully cloned')
+ default=False,
+ help_text=_('This project has been successfully cloned'),
)
tags = TaggableManager(blank=True)
objects = ProjectQuerySet.as_manager()
all_objects = models.Manager()
- class Meta(object):
+ class Meta:
ordering = ('slug',)
permissions = (
# Translators: Permission around whether a user can view the
@@ -271,7 +416,7 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ
self.slug = slugify(self.name)
if not self.slug:
raise Exception(_('Model must have slug'))
- super(Project, self).save(*args, **kwargs)
+ super().save(*args, **kwargs)
for owner in self.users.all():
assign('view_project', owner, self)
try:
@@ -310,7 +455,10 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ
try:
if not first_save:
broadcast(
- type='app', task=tasks.update_static_metadata, args=[self.pk],)
+ type='app',
+ task=tasks.update_static_metadata,
+ args=[self.pk],
+ )
except Exception:
log.exception('failed to update static metadata')
try:
@@ -329,12 +477,20 @@ def get_docs_url(self, version_slug=None, lang_slug=None, private=None):
Always use http for now, to avoid content warnings.
"""
- return resolve(project=self, version_slug=version_slug, language=lang_slug, private=private)
+ return resolve(
+ project=self,
+ version_slug=version_slug,
+ language=lang_slug,
+ private=private,
+ )
def get_builds_url(self):
- return reverse('builds_project_list', kwargs={
- 'project_slug': self.slug,
- })
+ return reverse(
+ 'builds_project_list',
+ kwargs={
+ 'project_slug': self.slug,
+ },
+ )
def get_canonical_url(self):
if getattr(settings, 'DONT_HIT_DB', True):
@@ -348,11 +504,8 @@ def get_subproject_urls(self):
This is used in search result linking
"""
if getattr(settings, 'DONT_HIT_DB', True):
- return [(proj['slug'], proj['canonical_url'])
- for proj in (
- api.project(self.pk)
- .subprojects()
- .get()['subprojects'])]
+ return [(proj['slug'], proj['canonical_url']) for proj in
+ (api.project(self.pk).subprojects().get()['subprojects'])]
return [(proj.child.slug, proj.child.get_docs_url())
for proj in self.subprojects.all()]
@@ -367,29 +520,43 @@ def get_production_media_path(self, type_, version_slug, include_file=True):
:returns: Full path to media file or path
"""
- if getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public') == 'public' or settings.DEBUG:
+ if getattr(settings, 'DEFAULT_PRIVACY_LEVEL',
+ 'public') == 'public' or settings.DEBUG:
path = os.path.join(
- settings.MEDIA_ROOT, type_, self.slug, version_slug)
+ settings.MEDIA_ROOT,
+ type_,
+ self.slug,
+ version_slug,
+ )
else:
path = os.path.join(
- settings.PRODUCTION_MEDIA_ARTIFACTS, type_, self.slug, version_slug)
+ settings.PRODUCTION_MEDIA_ARTIFACTS,
+ type_,
+ self.slug,
+ version_slug,
+ )
if include_file:
path = os.path.join(
- path, '%s.%s' % (self.slug, type_.replace('htmlzip', 'zip')))
+ path,
+ '{}.{}'.format(self.slug, type_.replace('htmlzip', 'zip')),
+ )
return path
def get_production_media_url(self, type_, version_slug, full_path=True):
"""Get the URL for downloading a specific media file."""
try:
- path = reverse('project_download_media', kwargs={
- 'project_slug': self.slug,
- 'type_': type_,
- 'version_slug': version_slug,
- })
+ path = reverse(
+ 'project_download_media',
+ kwargs={
+ 'project_slug': self.slug,
+ 'type_': type_,
+ 'version_slug': version_slug,
+ },
+ )
except NoReverseMatch:
return ''
if full_path:
- path = '//%s%s' % (settings.PRODUCTION_DOMAIN, path)
+ path = '//{}{}'.format(settings.PRODUCTION_DOMAIN, path)
return path
def subdomain(self):
@@ -399,11 +566,17 @@ def subdomain(self):
def get_downloads(self):
downloads = {}
downloads['htmlzip'] = self.get_production_media_url(
- 'htmlzip', self.get_default_version())
+ 'htmlzip',
+ self.get_default_version(),
+ )
downloads['epub'] = self.get_production_media_url(
- 'epub', self.get_default_version())
+ 'epub',
+ self.get_default_version(),
+ )
downloads['pdf'] = self.get_production_media_url(
- 'pdf', self.get_default_version())
+ 'pdf',
+ self.get_default_version(),
+ )
return downloads
@property
@@ -503,7 +676,9 @@ def conf_file(self, version=LATEST):
"""Find a ``conf.py`` file in the project checkout."""
if self.conf_py_file:
conf_path = os.path.join(
- self.checkout_path(version), self.conf_py_file,)
+ self.checkout_path(version),
+ self.conf_py_file,
+ )
if os.path.exists(conf_path):
log.info('Inserting conf.py file path from model')
@@ -526,12 +701,10 @@ def conf_file(self, version=LATEST):
# the `doc` word in the path, we raise an error informing this to the user
if len(files) > 1:
raise ProjectConfigurationError(
- ProjectConfigurationError.MULTIPLE_CONF_FILES
+ ProjectConfigurationError.MULTIPLE_CONF_FILES,
)
- raise ProjectConfigurationError(
- ProjectConfigurationError.NOT_FOUND
- )
+ raise ProjectConfigurationError(ProjectConfigurationError.NOT_FOUND)
def conf_dir(self, version=LATEST):
conf_file = self.conf_file(version)
@@ -557,18 +730,30 @@ def has_aliases(self):
def has_pdf(self, version_slug=LATEST):
if not self.enable_pdf_build:
return False
- return os.path.exists(self.get_production_media_path(
- type_='pdf', version_slug=version_slug))
+ return os.path.exists(
+ self.get_production_media_path(
+ type_='pdf',
+ version_slug=version_slug,
+ )
+ )
def has_epub(self, version_slug=LATEST):
if not self.enable_epub_build:
return False
- return os.path.exists(self.get_production_media_path(
- type_='epub', version_slug=version_slug))
+ return os.path.exists(
+ self.get_production_media_path(
+ type_='epub',
+ version_slug=version_slug,
+ )
+ )
def has_htmlzip(self, version_slug=LATEST):
- return os.path.exists(self.get_production_media_path(
- type_='htmlzip', version_slug=version_slug))
+ return os.path.exists(
+ self.get_production_media_path(
+ type_='htmlzip',
+ version_slug=version_slug,
+ )
+ )
@property
def sponsored(self):
@@ -668,8 +853,10 @@ def api_versions(self):
def active_versions(self):
from readthedocs.builds.models import Version
versions = Version.objects.public(project=self, only_active=True)
- return (versions.filter(built=True, active=True) |
- versions.filter(active=True, uploaded=True))
+ return (
+ versions.filter(built=True, active=True) |
+ versions.filter(active=True, uploaded=True)
+ )
def ordered_active_versions(self, user=None):
from readthedocs.builds.models import Version
@@ -710,23 +897,27 @@ def update_stable_version(self):
current_stable = self.get_stable_version()
if current_stable:
identifier_updated = (
- new_stable.identifier != current_stable.identifier)
+ new_stable.identifier != current_stable.identifier
+ )
if identifier_updated and current_stable.active and current_stable.machine:
log.info(
'Update stable version: {project}:{version}'.format(
project=self.slug,
- version=new_stable.identifier))
+ version=new_stable.identifier,
+ ),
+ )
current_stable.identifier = new_stable.identifier
current_stable.save()
return new_stable
else:
log.info(
- 'Creating new stable version: {project}:{version}'.format(
- project=self.slug,
- version=new_stable.identifier))
+ 'Creating new stable version: {project}:{version}'
+ .format(project=self.slug, version=new_stable.identifier),
+ )
current_stable = self.versions.create_stable(
type=new_stable.type,
- identifier=new_stable.identifier)
+ identifier=new_stable.identifier,
+ )
return new_stable
def versions_from_branch_name(self, branch):
@@ -749,7 +940,8 @@ def get_default_version(self):
return self.default_version
# check if the default_version exists
version_qs = self.versions.filter(
- slug=self.default_version, active=True
+ slug=self.default_version,
+ active=True,
)
if version_qs.exists():
return self.default_version
@@ -763,7 +955,9 @@ def get_default_branch(self):
def add_subproject(self, child, alias=None):
subproject, __ = ProjectRelationship.objects.get_or_create(
- parent=self, child=child, alias=alias,
+ parent=self,
+ child=child,
+ alias=alias,
)
return subproject
@@ -796,7 +990,7 @@ def get_feature_value(self, feature, positive, negative):
@property
def show_advertising(self):
"""
- Whether this project is ad-free
+ Whether this project is ad-free.
:returns: ``True`` if advertising should be shown and ``False`` otherwise
:rtype: bool
@@ -846,13 +1040,18 @@ def __init__(self, *args, **kwargs):
ad_free = (not kwargs.pop('show_advertising', True))
# These fields only exist on the API return, not on the model, so we'll
# remove them to avoid throwing exceptions due to unexpected fields
- for key in ['users', 'resource_uri', 'absolute_url', 'downloads',
- 'main_language_project', 'related_projects']:
+ for key in [
+ 'users',
+ 'resource_uri',
+ 'absolute_url',
+ 'downloads',
+ 'main_language_project',
+ 'related_projects']:
try:
del kwargs[key]
except KeyError:
pass
- super(APIProject, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# Overwrite the database property with the value from the API
self.ad_free = ad_free
@@ -884,10 +1083,17 @@ class ImportedFile(models.Model):
things like CDN invalidation.
"""
- project = models.ForeignKey('Project', verbose_name=_('Project'),
- related_name='imported_files')
- version = models.ForeignKey('builds.Version', verbose_name=_('Version'),
- related_name='imported_files', null=True)
+ project = models.ForeignKey(
+ 'Project',
+ verbose_name=_('Project'),
+ related_name='imported_files',
+ )
+ version = models.ForeignKey(
+ 'builds.Version',
+ verbose_name=_('Version'),
+ related_name='imported_files',
+ null=True,
+ )
name = models.CharField(_('Name'), max_length=255)
slug = models.SlugField(_('Slug'))
path = models.CharField(_('Path'), max_length=255)
@@ -896,18 +1102,21 @@ class ImportedFile(models.Model):
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
def get_absolute_url(self):
- return resolve(project=self.project, version_slug=self.version.slug, filename=self.path)
+ return resolve(
+ project=self.project,
+ version_slug=self.version.slug,
+ filename=self.path,
+ )
def __str__(self):
- return '%s: %s' % (self.name, self.project)
+ return '{}: {}'.format(self.name, self.project)
class Notification(models.Model):
- project = models.ForeignKey(Project,
- related_name='%(class)s_notifications')
+ project = models.ForeignKey(Project, related_name='%(class)s_notifications')
objects = RelatedProjectQuerySet.as_manager()
- class Meta(object):
+ class Meta:
abstract = True
@@ -921,8 +1130,11 @@ def __str__(self):
@python_2_unicode_compatible
class WebHook(Notification):
- url = models.URLField(max_length=600, blank=True,
- help_text=_('URL to send the webhook to'))
+ url = models.URLField(
+ max_length=600,
+ blank=True,
+ help_text=_('URL to send the webhook to'),
+ )
def __str__(self):
return self.url
@@ -934,35 +1146,47 @@ class Domain(models.Model):
"""A custom domain name for a project."""
project = models.ForeignKey(Project, related_name='domains')
- domain = models.CharField(_('Domain'), unique=True, max_length=255,
- validators=[validate_domain_name])
+ domain = models.CharField(
+ _('Domain'),
+ unique=True,
+ max_length=255,
+ validators=[validate_domain_name],
+ )
machine = models.BooleanField(
- default=False, help_text=_('This Domain was auto-created')
+ default=False,
+ help_text=_('This Domain was auto-created'),
)
cname = models.BooleanField(
- default=False, help_text=_('This Domain is a CNAME for the project')
+ default=False,
+ help_text=_('This Domain is a CNAME for the project'),
)
canonical = models.BooleanField(
default=False,
help_text=_(
'This Domain is the primary one where the documentation is '
- 'served from')
+ 'served from',
+ ),
)
https = models.BooleanField(
_('Use HTTPS'),
default=False,
- help_text=_('Always use HTTPS for this domain')
+ help_text=_('Always use HTTPS for this domain'),
+ )
+ count = models.IntegerField(
+ default=0,
+ help_text=_('Number of times this domain has been hit'),
)
- count = models.IntegerField(default=0, help_text=_(
- 'Number of times this domain has been hit'),)
objects = RelatedProjectQuerySet.as_manager()
- class Meta(object):
+ class Meta:
ordering = ('-canonical', '-machine', 'domain')
def __str__(self):
- return '{domain} pointed at {project}'.format(domain=self.domain, project=self.project.name)
+ return '{domain} pointed at {project}'.format(
+ domain=self.domain,
+ project=self.project.name,
+ )
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
from readthedocs.projects import tasks
@@ -971,15 +1195,21 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ
self.domain = parsed.netloc
else:
self.domain = parsed.path
- super(Domain, self).save(*args, **kwargs)
- broadcast(type='app', task=tasks.symlink_domain,
- args=[self.project.pk, self.pk],)
+ super().save(*args, **kwargs)
+ broadcast(
+ type='app',
+ task=tasks.symlink_domain,
+ args=[self.project.pk, self.pk],
+ )
def delete(self, *args, **kwargs): # pylint: disable=arguments-differ
from readthedocs.projects import tasks
- broadcast(type='app', task=tasks.symlink_domain,
- args=[self.project.pk, self.pk, True],)
- super(Domain, self).delete(*args, **kwargs)
+ broadcast(
+ type='app',
+ task=tasks.symlink_domain,
+ args=[self.project.pk, self.pk, True],
+ )
+ super().delete(*args, **kwargs)
@python_2_unicode_compatible
@@ -1018,15 +1248,34 @@ def add_features(sender, **kwargs):
(ALLOW_DEPRECATED_WEBHOOKS, _('Allow deprecated webhook views')),
(PIP_ALWAYS_UPGRADE, _('Always run pip install --upgrade')),
(SKIP_SUBMODULES, _('Skip git submodule checkout')),
- (DONT_OVERWRITE_SPHINX_CONTEXT, _(
- 'Do not overwrite context vars in conf.py with Read the Docs context')),
- (ALLOW_V2_CONFIG_FILE, _(
- 'Allow to use the v2 of the configuration file')),
- (MKDOCS_THEME_RTD, _('Use Read the Docs theme for MkDocs as default theme')),
- (DONT_SHALLOW_CLONE, _(
- 'Do not shallow clone when cloning git repos')),
- (USE_TESTING_BUILD_IMAGE, _(
- 'Use Docker image labelled as `testing` to build the docs')),
+ (
+ DONT_OVERWRITE_SPHINX_CONTEXT,
+ _(
+ 'Do not overwrite context vars in conf.py with Read the Docs context',
+ ),
+ ),
+ (
+ ALLOW_V2_CONFIG_FILE,
+ _(
+ 'Allow to use the v2 of the configuration file',
+ ),
+ ),
+ (
+ MKDOCS_THEME_RTD,
+ _('Use Read the Docs theme for MkDocs as default theme')
+ ),
+ (
+ DONT_SHALLOW_CLONE,
+ _(
+ 'Do not shallow clone when cloning git repos',
+ ),
+ ),
+ (
+ USE_TESTING_BUILD_IMAGE,
+ _(
+ 'Use Docker image labelled as `testing` to build the docs',
+ ),
+ ),
)
projects = models.ManyToManyField(
@@ -1052,9 +1301,7 @@ def add_features(sender, **kwargs):
objects = FeatureQuerySet.as_manager()
def __str__(self):
- return '{0} feature'.format(
- self.get_feature_display(),
- )
+ return '{} feature'.format(self.get_feature_display(),)
def get_feature_display(self):
"""
@@ -1087,4 +1334,4 @@ def __str__(self):
def save(self, *args, **kwargs): # pylint: disable=arguments-differ
self.value = shlex_quote(self.value)
- return super(EnvironmentVariable, self).save(*args, **kwargs)
+ return super().save(*args, **kwargs)
diff --git a/readthedocs/projects/notifications.py b/readthedocs/projects/notifications.py
index db7838bc80e..4848b575f18 100644
--- a/readthedocs/projects/notifications.py
+++ b/readthedocs/projects/notifications.py
@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
-"""Project notifications"""
-from __future__ import absolute_import
-from datetime import timedelta
-from django.utils import timezone
+"""Project notifications."""
+
from django.http import HttpRequest
-from messages_extends.models import Message
+
from readthedocs.notifications import Notification
from readthedocs.notifications.constants import REQUIREMENT
diff --git a/readthedocs/projects/querysets.py b/readthedocs/projects/querysets.py
index 1fbab354262..3d04667d0ff 100644
--- a/readthedocs/projects/querysets.py
+++ b/readthedocs/projects/querysets.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
-"""Project model QuerySet classes."""
-from __future__ import absolute_import
+"""Project model QuerySet classes."""
from django.db import models
from django.db.models import Q
@@ -45,7 +44,9 @@ def public(self, user=None):
return queryset
def protected(self, user=None):
- queryset = self.filter(privacy_level__in=[constants.PUBLIC, constants.PROTECTED])
+ queryset = self.filter(
+ privacy_level__in=[constants.PUBLIC, constants.PROTECTED],
+ )
if user:
return self._add_user_repos(queryset, user)
return queryset
@@ -93,9 +94,11 @@ class ProjectQuerySet(SettingsOverrideObject):
class RelatedProjectQuerySetBase(models.QuerySet):
"""
- A manager for things that relate to Project and need to get their perms from the project.
+ Useful for objects that relate to Project and its permissions.
+
+ Objects get the permissions from the project itself.
- This shouldn't be used as a subclass.
+ ..note:: This shouldn't be used as a subclass.
"""
use_for_related_fields = True
@@ -124,7 +127,10 @@ def public(self, user=None, project=None):
def protected(self, user=None, project=None):
kwargs = {
- '%s__privacy_level__in' % self.project_field: [constants.PUBLIC, constants.PROTECTED]
+ '%s__privacy_level__in' % self.project_field: [
+ constants.PUBLIC,
+ constants.PROTECTED,
+ ],
}
queryset = self.filter(**kwargs)
if user:
@@ -179,5 +185,5 @@ class FeatureQuerySet(models.QuerySet):
def for_project(self, project):
return self.filter(
Q(projects=project) |
- Q(default_true=True, add_date__gt=project.pub_date)
+ Q(default_true=True, add_date__gt=project.pub_date),
).distinct()
diff --git a/readthedocs/projects/signals.py b/readthedocs/projects/signals.py
index 151259c4ccc..1d1788ef28a 100644
--- a/readthedocs/projects/signals.py
+++ b/readthedocs/projects/signals.py
@@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
-"""Project signals"""
-from __future__ import absolute_import
+"""Project signals."""
+
import django.dispatch
-before_vcs = django.dispatch.Signal(providing_args=["version"])
-after_vcs = django.dispatch.Signal(providing_args=["version"])
+before_vcs = django.dispatch.Signal(providing_args=['version'])
+after_vcs = django.dispatch.Signal(providing_args=['version'])
-before_build = django.dispatch.Signal(providing_args=["version"])
-after_build = django.dispatch.Signal(providing_args=["version"])
+before_build = django.dispatch.Signal(providing_args=['version'])
+after_build = django.dispatch.Signal(providing_args=['version'])
-project_import = django.dispatch.Signal(providing_args=["project"])
+project_import = django.dispatch.Signal(providing_args=['project'])
-files_changed = django.dispatch.Signal(providing_args=["project", "files"])
+files_changed = django.dispatch.Signal(providing_args=['project', 'files'])
# Used to force verify a domain (eg. for SSL cert issuance)
-domain_verify = django.dispatch.Signal(providing_args=["domain"])
+domain_verify = django.dispatch.Signal(providing_args=['domain'])
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py
index 5daa35b4e62..5f00374aef1 100644
--- a/readthedocs/projects/tasks.py
+++ b/readthedocs/projects/tasks.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+
"""
Tasks related to projects.
@@ -6,13 +7,6 @@
rebuilding documentation.
"""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import datetime
import hashlib
import json
@@ -23,11 +17,10 @@
from collections import Counter, defaultdict
import requests
-from builtins import str
from celery.exceptions import SoftTimeLimitExceeded
from django.conf import settings
-from django.urls import reverse
from django.db.models import Q
+from django.urls import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from slumber.exceptions import HttpClientError
@@ -56,11 +49,11 @@
)
from readthedocs.doc_builder.exceptions import (
BuildEnvironmentError,
+ BuildEnvironmentWarning,
BuildTimeoutError,
ProjectBuildsSkippedError,
VersionLockedError,
YAMLParseError,
- BuildEnvironmentWarning,
)
from readthedocs.doc_builder.loader import get_builder_class
from readthedocs.doc_builder.python_environments import Conda, Virtualenv
@@ -79,14 +72,15 @@
after_vcs,
before_build,
before_vcs,
- files_changed,
domain_verify,
+ files_changed,
)
+
log = logging.getLogger(__name__)
-class SyncRepositoryMixin(object):
+class SyncRepositoryMixin:
"""Mixin that handles the VCS sync/update."""
@@ -106,9 +100,9 @@ def get_version(project=None, version_pk=None):
if version_pk:
version_data = api_v2.version(version_pk).get()
else:
- version_data = (api_v2
- .version(project.slug)
- .get(slug=LATEST)['objects'][0])
+ version_data = (
+ api_v2.version(project.slug).get(slug=LATEST)['objects'][0]
+ )
return APIVersion(**version_data)
def get_vcs_repo(self):
@@ -143,11 +137,13 @@ def sync_repo(self):
slug=self.version.slug,
identifier=self.version.identifier,
)
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg=msg,
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg=msg,
+ ),
+ )
version_repo = self.get_vcs_repo()
version_repo.update()
self.sync_versions(version_repo)
@@ -159,34 +155,28 @@ def sync_versions(self, version_repo):
"""
Update tags/branches hitting the API.
- It may trigger a new build to the stable version
- when hitting the ``sync_versions`` endpoint.
+ It may trigger a new build to the stable version when hitting the
+ ``sync_versions`` endpoint.
"""
version_post_data = {'repo': version_repo.repo_url}
if version_repo.supports_tags:
- version_post_data['tags'] = [
- {
- 'identifier': v.identifier,
- 'verbose_name': v.verbose_name,
- }
- for v in version_repo.tags
- ]
+ version_post_data['tags'] = [{
+ 'identifier': v.identifier,
+ 'verbose_name': v.verbose_name,
+ } for v in version_repo.tags]
if version_repo.supports_branches:
- version_post_data['branches'] = [
- {
- 'identifier': v.identifier,
- 'verbose_name': v.verbose_name,
- }
- for v in version_repo.branches
- ]
+ version_post_data['branches'] = [{
+ 'identifier': v.identifier,
+ 'verbose_name': v.verbose_name,
+ } for v in version_repo.branches]
self.validate_duplicate_reserved_versions(version_post_data)
try:
api_v2.project(self.project.pk).sync_versions.post(
- version_post_data
+ version_post_data,
)
except HttpClientError:
log.exception('Sync Versions Exception')
@@ -211,7 +201,7 @@ def validate_duplicate_reserved_versions(self, data):
for reserved_name in [STABLE_VERBOSE_NAME, LATEST_VERBOSE_NAME]:
if counter[reserved_name] > 1:
raise RepositoryError(
- RepositoryError.DUPLICATED_RESERVED_VERSIONS
+ RepositoryError.DUPLICATED_RESERVED_VERSIONS,
)
@@ -269,7 +259,6 @@ def run(self, version_pk): # pylint: disable=arguments-differ
'version': self.version.slug,
},
},
-
)
return False
@@ -283,8 +272,8 @@ def run(self, version_pk): # pylint: disable=arguments-differ
ProjectBuildsSkippedError,
YAMLParseError,
BuildTimeoutError,
- ProjectBuildsSkippedError
- )
+ ProjectBuildsSkippedError,
+ ),
)
def update_docs_task(self, project_id, *args, **kwargs):
step = UpdateDocsTaskStep(task=self)
@@ -306,12 +295,19 @@ class UpdateDocsTaskStep(SyncRepositoryMixin):
underlying task. Previously, we were using a custom ``celery.Task`` for
this, but this class is only instantiated once -- on startup. The effect
was that this instance shared state between workers.
-
"""
- def __init__(self, build_env=None, python_env=None, config=None,
- force=False, build=None, project=None,
- version=None, task=None):
+ def __init__(
+ self,
+ build_env=None,
+ python_env=None,
+ config=None,
+ force=False,
+ build=None,
+ project=None,
+ version=None,
+ task=None,
+ ):
self.build_env = build_env
self.python_env = python_env
self.build_force = force
@@ -330,8 +326,10 @@ def __init__(self, build_env=None, python_env=None, config=None,
self.setup_env = None
# pylint: disable=arguments-differ
- def run(self, pk, version_pk=None, build_pk=None, record=True,
- docker=None, force=False, **__):
+ def run(
+ self, pk, version_pk=None, build_pk=None, record=True, docker=None,
+ force=False, **__
+ ):
"""
Run a documentation sync n' build.
@@ -391,7 +389,7 @@ def run(self, pk, version_pk=None, build_pk=None, record=True,
self.setup_env.failure = BuildEnvironmentError(
BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
build_id=build_pk,
- )
+ ),
)
self.setup_env.update_build(BUILD_STATE_FINISHED)
@@ -419,7 +417,7 @@ def run(self, pk, version_pk=None, build_pk=None, record=True,
self.build_env.failure = BuildEnvironmentError(
BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
build_id=build_pk,
- )
+ ),
)
self.build_env.update_build(BUILD_STATE_FINISHED)
@@ -458,7 +456,7 @@ def run_setup(self, record=True):
raise YAMLParseError(
YAMLParseError.GENERIC_WITH_PARSE_EXCEPTION.format(
exception=str(e),
- )
+ ),
)
self.save_build_config()
@@ -466,13 +464,15 @@ def run_setup(self, record=True):
if self.setup_env.failure or self.config is None:
msg = 'Failing build because of setup failure: {}'.format(
- self.setup_env.failure
+ self.setup_env.failure,
+ )
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg=msg,
+ ),
)
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg=msg,
- ))
# Send notification to users only if the build didn't fail because
# of VersionLockedError: this exception occurs when a build is
@@ -527,11 +527,13 @@ def run_build(self, docker, record):
with self.build_env:
python_env_cls = Virtualenv
if self.config.conda is not None:
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg='Using conda',
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg='Using conda',
+ ),
+ )
python_env_cls = Conda
self.python_env = python_env_cls(
version=self.version,
@@ -586,12 +588,14 @@ def get_build(build_pk):
if build_pk:
build = api_v2.build(build_pk).get()
private_keys = [
- 'project', 'version', 'resource_uri', 'absolute_uri',
+ 'project',
+ 'version',
+ 'resource_uri',
+ 'absolute_uri',
]
return {
key: val
- for key, val in build.items()
- if key not in private_keys
+ for key, val in build.items() if key not in private_keys
}
def setup_vcs(self):
@@ -604,11 +608,13 @@ def setup_vcs(self):
"""
self.setup_env.update_build(state=BUILD_STATE_CLONING)
- log.info(LOG_TEMPLATE.format(
- project=self.project.slug,
- version=self.version.slug,
- msg='Updating docs from VCS',
- ))
+ log.info(
+ LOG_TEMPLATE.format(
+ project=self.project.slug,
+ version=self.version.slug,
+ msg='Updating docs from VCS',
+ ),
+ )
try:
self.sync_repo()
except RepositoryError:
@@ -689,8 +695,14 @@ def save_build_config(self):
})
self.build['config'] = config
- def update_app_instances(self, html=False, localmedia=False, search=False,
- pdf=False, epub=False):
+ def update_app_instances(
+ self,
+ html=False,
+ localmedia=False,
+ search=False,
+ pdf=False,
+ epub=False,
+ ):
"""
Update application instances with build artifacts.
@@ -801,7 +813,7 @@ def build_docs_html(self):
type='app',
task=move_files,
args=[self.version.pk, socket.gethostname()],
- kwargs=dict(html=True)
+ kwargs=dict(html=True),
)
except socket.error:
log.exception('move_files task has failed on socket error.')
@@ -853,7 +865,7 @@ def build_docs_class(self, builder_class):
"""
builder = get_builder_class(builder_class)(
self.build_env,
- python_env=self.python_env
+ python_env=self.python_env,
)
success = builder.build()
builder.move()
@@ -870,8 +882,16 @@ def is_type_sphinx(self):
# Web tasks
@app.task(queue='web')
-def sync_files(project_pk, version_pk, hostname=None, html=False,
- localmedia=False, search=False, pdf=False, epub=False):
+def sync_files(
+ project_pk,
+ version_pk,
+ hostname=None,
+ html=False,
+ localmedia=False,
+ search=False,
+ pdf=False,
+ epub=False,
+):
"""
Sync build artifacts to application instances.
@@ -914,8 +934,15 @@ def sync_files(project_pk, version_pk, hostname=None, html=False,
@app.task(queue='web')
-def move_files(version_pk, hostname, html=False, localmedia=False,
- search=False, pdf=False, epub=False):
+def move_files(
+ version_pk,
+ hostname,
+ html=False,
+ localmedia=False,
+ search=False,
+ pdf=False,
+ epub=False,
+):
"""
Task to move built documentation to web servers.
@@ -938,7 +965,7 @@ def move_files(version_pk, hostname, html=False, localmedia=False,
project=version.project.slug,
version=version.slug,
msg='Moving files',
- )
+ ),
)
if html:
@@ -1015,13 +1042,16 @@ def update_search(version_pk, commit, delete_non_commit_files=True):
else:
log.debug(
'Unknown documentation type: %s',
- version.project.documentation_type
+ version.project.documentation_type,
)
return
log_msg = ' '.join([page['path'] for page in page_list])
- log.info("(Search Index) Sending Data: %s [%s]", version.project.slug,
- log_msg)
+ log.info(
+ '(Search Index) Sending Data: %s [%s]',
+ version.project.slug,
+ log_msg,
+ )
index_search_request(
version=version,
page_list=page_list,
@@ -1065,7 +1095,9 @@ def remove_orphan_symlinks():
"""
for symlink in [PublicSymlink, PrivateSymlink]:
for domain_path in [symlink.PROJECT_CNAME_ROOT, symlink.CNAME_ROOT]:
- valid_cnames = set(Domain.objects.all().values_list('domain', flat=True))
+ valid_cnames = set(
+ Domain.objects.all().values_list('domain', flat=True),
+ )
orphan_cnames = set(os.listdir(domain_path)) - valid_cnames
for cname in orphan_cnames:
orphan_domain_path = os.path.join(domain_path, cname)
@@ -1110,7 +1142,7 @@ def fileify(version_pk, commit):
'Imported File not being built because no commit '
'information'
),
- )
+ ),
)
return
@@ -1121,7 +1153,7 @@ def fileify(version_pk, commit):
project=version.project.slug,
version=version.slug,
msg='Creating ImportedFiles',
- )
+ ),
)
_manage_imported_files(version, path, commit)
else:
@@ -1130,7 +1162,7 @@ def fileify(version_pk, commit):
project=project.slug,
version=version.slug,
msg='No ImportedFile files',
- )
+ ),
)
@@ -1145,8 +1177,10 @@ def _manage_imported_files(version, path, commit):
changed_files = set()
for root, __, filenames in os.walk(path):
for filename in filenames:
- dirpath = os.path.join(root.replace(path, '').lstrip('/'),
- filename.lstrip('/'))
+ dirpath = os.path.join(
+ root.replace(path, '').lstrip('/'),
+ filename.lstrip('/'),
+ )
full_path = os.path.join(root, filename)
md5 = hashlib.md5(open(full_path, 'rb').read()).hexdigest()
try:
@@ -1166,16 +1200,22 @@ def _manage_imported_files(version, path, commit):
obj.commit = commit
obj.save()
# Delete ImportedFiles from previous versions
- ImportedFile.objects.filter(project=version.project,
- version=version
- ).exclude(commit=commit).delete()
+ ImportedFile.objects.filter(
+ project=version.project,
+ version=version,
+ ).exclude(commit=commit).delete()
changed_files = [
resolve_path(
- version.project, filename=file, version_slug=version.slug,
+ version.project,
+ filename=file,
+ version_slug=version.slug,
) for file in changed_files
]
- files_changed.send(sender=Project, project=version.project,
- files=changed_files)
+ files_changed.send(
+ sender=Project,
+ project=version.project,
+ files=changed_files,
+ )
@app.task(queue='web')
@@ -1185,7 +1225,10 @@ def send_notifications(version_pk, build_pk):
for hook in version.project.webhook_notifications.all():
webhook_notification(version, build, hook.url)
- for email in version.project.emailhook_notifications.all().values_list('email', flat=True):
+ for email in version.project.emailhook_notifications.all().values_list(
+ 'email',
+ flat=True,
+ ):
email_notification(version, build, email)
@@ -1202,7 +1245,7 @@ def email_notification(version, build, email):
project=version.project.slug,
version=version.slug,
msg='sending email to: %s' % email,
- )
+ ),
)
# We send only what we need from the Django model objects here to avoid
@@ -1218,20 +1261,24 @@ def email_notification(version, build, email):
'pk': build.pk,
'error': build.error,
},
- 'build_url': 'https://{0}{1}'.format(
+ 'build_url': 'https://{}{}'.format(
getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
build.get_absolute_url(),
),
- 'unsub_url': 'https://{0}{1}'.format(
+ 'unsub_url': 'https://{}{}'.format(
getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
reverse('projects_notifications', args=[version.project.slug]),
),
}
if build.commit:
- title = _('Failed: {project[name]} ({commit})').format(commit=build.commit[:8], **context)
+ title = _(
+ 'Failed: {project[name]} ({commit})',
+ ).format(commit=build.commit[:8], **context)
else:
- title = _('Failed: {project[name]} ({version[verbose_name]})').format(**context)
+ title = _('Failed: {project[name]} ({version[verbose_name]})').format(
+ **context
+ )
send_email(
email,
@@ -1266,7 +1313,7 @@ def webhook_notification(version, build, hook_url):
project=project.slug,
version='',
msg='sending notification to: %s' % hook_url,
- )
+ ),
)
try:
requests.post(hook_url, data=data)
@@ -1299,7 +1346,7 @@ def update_static_metadata(project_pk, path=None):
project=project.slug,
version='',
msg='Updating static metadata',
- )
+ ),
)
translations = [trans.language for trans in project.translations.all()]
languages = set(translations)
@@ -1321,8 +1368,8 @@ def update_static_metadata(project_pk, path=None):
LOG_TEMPLATE.format(
project=project.slug,
version='',
- msg='Cannot write to metadata.json: {0}'.format(e),
- )
+ msg='Cannot write to metadata.json: {}'.format(e),
+ ),
)
@@ -1367,8 +1414,9 @@ def finish_inactive_builds():
"""
time_limit = int(DOCKER_LIMITS['time'] * 1.2)
delta = datetime.timedelta(seconds=time_limit)
- query = (~Q(state=BUILD_STATE_FINISHED) &
- Q(date__lte=timezone.now() - delta))
+ query = (
+ ~Q(state=BUILD_STATE_FINISHED) & Q(date__lte=timezone.now() - delta)
+ )
builds_finished = 0
builds = Build.objects.filter(query)[:50]
@@ -1388,7 +1436,7 @@ def finish_inactive_builds():
build.error = _(
'This build was terminated due to inactivity. If you '
'continue to encounter this error, file a support '
- 'request with and reference this build id ({0}).'.format(build.pk),
+ 'request with and reference this build id ({}).'.format(build.pk),
)
build.save()
builds_finished += 1
@@ -1402,7 +1450,7 @@ def finish_inactive_builds():
@app.task(queue='web')
def retry_domain_verification(domain_pk):
"""
- Trigger domain verification on a domain
+ Trigger domain verification on a domain.
:param domain_pk: a `Domain` pk to verify
"""
diff --git a/readthedocs/projects/templatetags/projects_tags.py b/readthedocs/projects/templatetags/projects_tags.py
index 699b1a24900..6a6d6d16a2e 100644
--- a/readthedocs/projects/templatetags/projects_tags.py
+++ b/readthedocs/projects/templatetags/projects_tags.py
@@ -1,6 +1,7 @@
-"""Project template tags and filters"""
+# -*- coding: utf-8 -*-
+
+"""Project template tags and filters."""
-from __future__ import absolute_import
from django import template
from readthedocs.projects.version_handling import comparable_version
@@ -11,14 +12,15 @@
@register.filter
def sort_version_aware(versions):
- """Takes a list of versions objects and sort them using version schemes"""
+ """Takes a list of versions objects and sort them using version schemes."""
return sorted(
versions,
key=lambda version: comparable_version(version.verbose_name),
- reverse=True)
+ reverse=True,
+ )
@register.filter
def is_project_user(user, project):
- """Return if user is a member of project.users"""
+ """Return if user is a member of project.users."""
return user in project.users.all()
diff --git a/readthedocs/projects/urls/private.py b/readthedocs/projects/urls/private.py
index 5dc7b649de7..75d6324d03b 100644
--- a/readthedocs/projects/urls/private.py
+++ b/readthedocs/projects/urls/private.py
@@ -1,11 +1,6 @@
-"""Project URLs for authenticated users."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Project URLs for authenticated users."""
from django.conf.urls import url
@@ -19,8 +14,8 @@
DomainUpdate,
EnvironmentVariableCreate,
EnvironmentVariableDelete,
- EnvironmentVariableList,
EnvironmentVariableDetail,
+ EnvironmentVariableList,
ImportView,
IntegrationCreate,
IntegrationDelete,
@@ -34,177 +29,242 @@
ProjectUpdate,
)
-urlpatterns = [
- url(r'^$',
- ProjectDashboard.as_view(),
- name='projects_dashboard'),
-
- url(r'^import/$',
- ImportView.as_view(wizard_class=ImportWizardView),
- {'wizard': ImportWizardView},
- name='projects_import'),
-
- url(r'^import/manual/$',
- ImportWizardView.as_view(),
- name='projects_import_manual'),
-
- url(r'^import/manual/demo/$',
- ImportDemoView.as_view(),
- name='projects_import_demo'),
-
- url(r'^(?P[-\w]+)/$',
- private.project_manage,
- name='projects_manage'),
-
- url(r'^(?P[-\w]+)/edit/$',
- ProjectUpdate.as_view(),
- name='projects_edit'),
-
- url(r'^(?P[-\w]+)/advanced/$',
- ProjectAdvancedUpdate.as_view(),
- name='projects_advanced'),
-
- url(r'^(?P[-\w]+)/version/(?P[^/]+)/delete_html/$',
- private.project_version_delete_html,
- name='project_version_delete_html'),
- url(r'^(?P[-\w]+)/version/(?P[^/]+)/$',
- private.project_version_detail,
- name='project_version_detail'),
-
- url(r'^(?P[-\w]+)/versions/$',
- private.project_versions,
- name='projects_versions'),
-
- url(r'^(?P[-\w]+)/delete/$',
- private.project_delete,
- name='projects_delete'),
-
- url(r'^(?P[-\w]+)/users/$',
- private.project_users,
- name='projects_users'),
-
- url(r'^(?P[-\w]+)/users/delete/$',
- private.project_users_delete,
- name='projects_users_delete'),
-
- url(r'^(?P[-\w]+)/notifications/$',
- private.project_notifications,
- name='projects_notifications'),
-
- url(r'^(?P[-\w]+)/notifications/delete/$',
- private.project_notifications_delete,
- name='projects_notification_delete'),
-
- url(r'^(?P[-\w]+)/translations/$',
- private.project_translations,
- name='projects_translations'),
-
- url(r'^(?P[-\w]+)/translations/delete/(?P[-\w]+)/$', # noqa
+urlpatterns = [
+ url(r'^$', ProjectDashboard.as_view(), name='projects_dashboard'),
+ url(
+ r'^import/$', ImportView.as_view(wizard_class=ImportWizardView),
+ {'wizard': ImportWizardView}, name='projects_import',
+ ),
+ url(
+ r'^import/manual/$', ImportWizardView.as_view(),
+ name='projects_import_manual',
+ ),
+ url(
+ r'^import/manual/demo/$', ImportDemoView.as_view(),
+ name='projects_import_demo',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/$', private.project_manage,
+ name='projects_manage',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/edit/$', ProjectUpdate.as_view(),
+ name='projects_edit',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/advanced/$',
+ ProjectAdvancedUpdate.as_view(), name='projects_advanced',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/version/(?P<version_slug>[^/]+)/delete_html/$',
+ private.project_version_delete_html, name='project_version_delete_html',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/version/(?P<version_slug>[^/]+)/$',
+ private.project_version_detail, name='project_version_detail',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/versions/$', private.project_versions,
+ name='projects_versions',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/delete/$', private.project_delete,
+ name='projects_delete',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/users/$', private.project_users,
+ name='projects_users',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/users/delete/$',
+ private.project_users_delete, name='projects_users_delete',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/notifications/$',
+ private.project_notifications, name='projects_notifications',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/notifications/delete/$',
+ private.project_notifications_delete, name='projects_notification_delete',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/translations/$',
+ private.project_translations, name='projects_translations',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/translations/delete/(?P<child_slug>[-\w]+)/$', # noqa
private.project_translations_delete,
- name='projects_translations_delete'),
-
- url(r'^(?P<project_slug>[-\w]+)/redirects/$',
- private.project_redirects,
- name='projects_redirects'),
-
- url(r'^(?P<project_slug>[-\w]+)/redirects/delete/$',
- private.project_redirects_delete,
- name='projects_redirects_delete'),
-
- url(r'^(?P<project_slug>[-\w]+)/advertising/$',
- ProjectAdvertisingUpdate.as_view(),
- name='projects_advertising'),
+ name='projects_translations_delete',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/redirects/$', private.project_redirects,
+ name='projects_redirects',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/redirects/delete/$',
+ private.project_redirects_delete, name='projects_redirects_delete',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/advertising/$',
+ ProjectAdvertisingUpdate.as_view(), name='projects_advertising',
+ ),
]
domain_urls = [
- url(r'^(?P<project_slug>[-\w]+)/domains/$',
+ url(
+ r'^(?P<project_slug>[-\w]+)/domains/$',
DomainList.as_view(),
- name='projects_domains'),
- url(r'^(?P<project_slug>[-\w]+)/domains/create/$',
+ name='projects_domains',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/domains/create/$',
DomainCreate.as_view(),
- name='projects_domains_create'),
- url(r'^(?P<project_slug>[-\w]+)/domains/(?P<domain_pk>[-\w]+)/edit/$',
+ name='projects_domains_create',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/domains/(?P<domain_pk>[-\w]+)/edit/$',
DomainUpdate.as_view(),
- name='projects_domains_edit'),
- url(r'^(?P<project_slug>[-\w]+)/domains/(?P<domain_pk>[-\w]+)/delete/$',
+ name='projects_domains_edit',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/domains/(?P<domain_pk>[-\w]+)/delete/$',
DomainDelete.as_view(),
- name='projects_domains_delete'),
+ name='projects_domains_delete',
+ ),
]
urlpatterns += domain_urls
integration_urls = [
- url(r'^(?P<project_slug>{project_slug})/integrations/$'.format(**pattern_opts),
+ url(
+ r'^(?P<project_slug>{project_slug})/integrations/$'.format(
+ **pattern_opts
+ ),
IntegrationList.as_view(),
- name='projects_integrations'),
- url(r'^(?P<project_slug>{project_slug})/integrations/sync/$'.format(**pattern_opts),
+ name='projects_integrations',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/integrations/sync/$'.format(
+ **pattern_opts
+ ),
IntegrationWebhookSync.as_view(),
- name='projects_integrations_webhooks_sync'),
- url((r'^(?P<project_slug>{project_slug})/integrations/create/$'
- .format(**pattern_opts)),
+ name='projects_integrations_webhooks_sync',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/integrations/create/$'.format(
+ **pattern_opts
+ )
+ ),
IntegrationCreate.as_view(),
- name='projects_integrations_create'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'integrations/(?P<integration_pk>{integer_pk})/$'
- .format(**pattern_opts)),
+ name='projects_integrations_create',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'integrations/(?P<integration_pk>{integer_pk})/$'.format(
+ **pattern_opts
+ )
+ ),
IntegrationDetail.as_view(),
- name='projects_integrations_detail'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'integrations/(?P<integration_pk>{integer_pk})/'
- r'exchange/(?P<exchange_pk>[-\w]+)/$'
- .format(**pattern_opts)),
+ name='projects_integrations_detail',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'integrations/(?P<integration_pk>{integer_pk})/'
+ r'exchange/(?P<exchange_pk>[-\w]+)/$'.format(**pattern_opts)
+ ),
IntegrationExchangeDetail.as_view(),
- name='projects_integrations_exchanges_detail'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'integrations/(?P<integration_pk>{integer_pk})/sync/$'
- .format(**pattern_opts)),
+ name='projects_integrations_exchanges_detail',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'integrations/(?P<integration_pk>{integer_pk})/sync/$'.format(
+ **pattern_opts
+ )
+ ),
IntegrationWebhookSync.as_view(),
- name='projects_integrations_webhooks_sync'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'integrations/(?P<integration_pk>{integer_pk})/delete/$'
- .format(**pattern_opts)),
+ name='projects_integrations_webhooks_sync',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'integrations/(?P<integration_pk>{integer_pk})/delete/$'.format(
+ **pattern_opts
+ )
+ ),
IntegrationDelete.as_view(),
- name='projects_integrations_delete'),
+ name='projects_integrations_delete',
+ ),
]
urlpatterns += integration_urls
subproject_urls = [
- url(r'^(?P<project_slug>{project_slug})/subprojects/$'.format(**pattern_opts),
+ url(
+ r'^(?P<project_slug>{project_slug})/subprojects/$'.format(
+ **pattern_opts
+ ),
private.ProjectRelationshipList.as_view(),
- name='projects_subprojects'),
- url((r'^(?P<project_slug>{project_slug})/subprojects/create/$'
- .format(**pattern_opts)),
+ name='projects_subprojects',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/subprojects/create/$'.format(
+ **pattern_opts
+ )
+ ),
private.ProjectRelationshipCreate.as_view(),
- name='projects_subprojects_create'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'subprojects/(?P<subproject_slug>{project_slug})/edit/$'
- .format(**pattern_opts)),
+ name='projects_subprojects_create',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'subprojects/(?P<subproject_slug>{project_slug})/edit/$'.format(
+ **pattern_opts
+ )
+ ),
private.ProjectRelationshipUpdate.as_view(),
- name='projects_subprojects_update'),
- url((r'^(?P<project_slug>{project_slug})/'
- r'subprojects/(?P<subproject_slug>{project_slug})/delete/$'
- .format(**pattern_opts)),
+ name='projects_subprojects_update',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/'
+ r'subprojects/(?P<subproject_slug>{project_slug})/delete/$'.format(
+ **pattern_opts
+ )
+ ),
private.ProjectRelationshipDelete.as_view(),
- name='projects_subprojects_delete'),
+ name='projects_subprojects_delete',
+ ),
]
urlpatterns += subproject_urls
environmentvariable_urls = [
- url(r'^(?P<project_slug>[-\w]+)/environmentvariables/$',
+ url(
+ r'^(?P<project_slug>[-\w]+)/environmentvariables/$',
EnvironmentVariableList.as_view(),
- name='projects_environmentvariables'),
- url(r'^(?P<project_slug>[-\w]+)/environmentvariables/create/$',
+ name='projects_environmentvariables',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/environmentvariables/create/$',
EnvironmentVariableCreate.as_view(),
- name='projects_environmentvariables_create'),
- url(r'^(?P<project_slug>[-\w]+)/environmentvariables/(?P<environmentvariable_pk>[-\w]+)/$',
+ name='projects_environmentvariables_create',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/environmentvariables/(?P<environmentvariable_pk>[-\w]+)/$',
EnvironmentVariableDetail.as_view(),
- name='projects_environmentvariables_detail'),
- url(r'^(?P<project_slug>[-\w]+)/environmentvariables/(?P<environmentvariable_pk>[-\w]+)/delete/$',
+ name='projects_environmentvariables_detail',
+ ),
+ url(
+ r'^(?P<project_slug>[-\w]+)/environmentvariables/(?P<environmentvariable_pk>[-\w]+)/delete/$',
EnvironmentVariableDelete.as_view(),
- name='projects_environmentvariables_delete'),
+ name='projects_environmentvariables_delete',
+ ),
]
urlpatterns += environmentvariable_urls
diff --git a/readthedocs/projects/urls/public.py b/readthedocs/projects/urls/public.py
index b46b8105aa4..f353714ac19 100644
--- a/readthedocs/projects/urls/public.py
+++ b/readthedocs/projects/urls/public.py
@@ -1,61 +1,80 @@
-"""Project URLS for public users"""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from django.conf.urls import url
+"""Project URLS for public users."""
-from readthedocs.projects.views import public
-from readthedocs.projects.views.public import ProjectIndex, ProjectDetailView
+from django.conf.urls import url
from readthedocs.builds import views as build_views
from readthedocs.constants import pattern_opts
+from readthedocs.projects.views import public
+from readthedocs.projects.views.public import ProjectDetailView, ProjectIndex
urlpatterns = [
- url(r'^$',
+ url(
+ r'^$',
ProjectIndex.as_view(),
- name='projects_list'),
-
- url(r'^(?P<project_slug>{project_slug})/$'.format(**pattern_opts),
+ name='projects_list',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/$'.format(**pattern_opts),
ProjectDetailView.as_view(),
- name='projects_detail'),
-
- url(r'^(?P<project_slug>{project_slug})/downloads/$'.format(**pattern_opts),
+ name='projects_detail',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/downloads/$'.format(**pattern_opts),
public.project_downloads,
- name='project_downloads'),
-
- url((r'^(?P<project_slug>{project_slug})/downloads/(?P<type_>[-\w]+)/'
- r'(?P<version_slug>{version_slug})/$'.format(**pattern_opts)),
+ name='project_downloads',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/downloads/(?P<type_>[-\w]+)/'
+ r'(?P<version_slug>{version_slug})/$'.format(**pattern_opts)
+ ),
public.project_download_media,
- name='project_download_media'),
-
- url(r'^(?P<project_slug>{project_slug})/badge/$'.format(**pattern_opts),
+ name='project_download_media',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/badge/$'.format(**pattern_opts),
public.project_badge,
- name='project_badge'),
-
- url((r'^(?P<project_slug>{project_slug})/tools/embed/$'
- .format(**pattern_opts)),
+ name='project_badge',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/tools/embed/$'.format(
+ **pattern_opts
+ )
+ ),
public.project_embed,
- name='project_embed'),
-
- url(r'^(?P<project_slug>{project_slug})/search/$'.format(**pattern_opts),
+ name='project_embed',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/search/$'.format(**pattern_opts),
public.elastic_project_search,
- name='elastic_project_search'),
-
- url((r'^(?P<project_slug>{project_slug})/builds/(?P<build_pk>\d+)/$'
- .format(**pattern_opts)),
+ name='elastic_project_search',
+ ),
+ url(
+ (
+ r'^(?P<project_slug>{project_slug})/builds/(?P<build_pk>\d+)/$'.format(
+ **pattern_opts
+ )
+ ),
build_views.BuildDetail.as_view(),
- name='builds_detail'),
-
- url((r'^(?P<project_slug>{project_slug})/builds/$'
- .format(**pattern_opts)),
+ name='builds_detail',
+ ),
+ url(
+ (r'^(?P<project_slug>{project_slug})/builds/$'.format(**pattern_opts)),
build_views.BuildList.as_view(),
- name='builds_project_list'),
-
- url(r'^(?P<project_slug>{project_slug})/versions/$'.format(**pattern_opts),
+ name='builds_project_list',
+ ),
+ url(
+ r'^(?P<project_slug>{project_slug})/versions/$'.format(**pattern_opts),
public.project_versions,
- name='project_version_list'),
-
- url(r'^tags/(?P<tag>[-\w]+)/$',
+ name='project_version_list',
+ ),
+ url(
+ r'^tags/(?P<tag>[-\w]+)/$',
ProjectIndex.as_view(),
- name='projects_tag_detail'),
+ name='projects_tag_detail',
+ ),
]
diff --git a/readthedocs/projects/utils.py b/readthedocs/projects/utils.py
index 840dd17482a..f83a3906a44 100644
--- a/readthedocs/projects/utils.py
+++ b/readthedocs/projects/utils.py
@@ -1,19 +1,13 @@
# -*- coding: utf-8 -*-
-"""Utility functions used by projects."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Utility functions used by projects."""
import logging
import os
-from builtins import open
from django.conf import settings
+
log = logging.getLogger(__name__)
diff --git a/readthedocs/projects/validators.py b/readthedocs/projects/validators.py
index 01d350a43aa..dc64b3fc086 100644
--- a/readthedocs/projects/validators.py
+++ b/readthedocs/projects/validators.py
@@ -1,15 +1,15 @@
+# -*- coding: utf-8 -*-
+
"""Validators for projects app."""
-# From https://github.com/django/django/pull/3477/files
-from __future__ import absolute_import
import re
+from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import ValidationError
+from django.core.validators import RegexValidator
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
-from django.core.validators import RegexValidator
-from future.backports.urllib.parse import urlparse
domain_regex = (
@@ -28,13 +28,13 @@ class DomainNameValidator(RegexValidator):
def __init__(self, accept_idna=True, **kwargs):
message = kwargs.get('message')
self.accept_idna = accept_idna
- super(DomainNameValidator, self).__init__(**kwargs)
+ super().__init__(**kwargs)
if not self.accept_idna and message is None:
self.message = _('Enter a valid domain name value')
def __call__(self, value):
try:
- super(DomainNameValidator, self).__call__(value)
+ super().__call__(value)
except ValidationError as exc:
if not self.accept_idna:
raise
@@ -44,14 +44,14 @@ def __call__(self, value):
idnavalue = value.encode('idna')
except UnicodeError:
raise exc
- super(DomainNameValidator, self).__call__(idnavalue)
+ super().__call__(idnavalue)
validate_domain_name = DomainNameValidator()
@deconstructible
-class RepositoryURLValidator(object):
+class RepositoryURLValidator:
disallow_relative_url = True
@@ -99,7 +99,7 @@ def __call__(self, value):
class SubmoduleURLValidator(RepositoryURLValidator):
"""
- A URL validator for repository submodules
+ A URL validator for repository submodules.
If a repository has a relative submodule, the URL path is effectively the
supermodule's remote ``origin`` URL with the relative path applied.
diff --git a/readthedocs/projects/version_handling.py b/readthedocs/projects/version_handling.py
index 2a7cffa5bdd..7a730e61fd0 100644
--- a/readthedocs/projects/version_handling.py
+++ b/readthedocs/projects/version_handling.py
@@ -1,15 +1,15 @@
# -*- coding: utf-8 -*-
-"""Project version handling."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Project version handling."""
import unicodedata
-import six
from packaging.version import InvalidVersion, Version
from readthedocs.builds.constants import (
- LATEST_VERBOSE_NAME, STABLE_VERBOSE_NAME, TAG)
+ LATEST_VERBOSE_NAME,
+ STABLE_VERBOSE_NAME,
+ TAG,
+)
def parse_version_failsafe(version_string):
@@ -25,7 +25,7 @@ def parse_version_failsafe(version_string):
:rtype: packaging.version.Version
"""
- if not isinstance(version_string, six.text_type):
+ if not isinstance(version_string, str):
uni_version = version_string.decode('utf-8')
else:
uni_version = version_string
@@ -89,7 +89,8 @@ def sort_versions(version_list):
versions,
key=lambda version_info: version_info[1],
reverse=True,
- ))
+ ),
+ )
def highest_version(version_list):
@@ -117,9 +118,11 @@ def determine_stable_version(version_list):
:rtype: readthedocs.builds.models.Version
"""
versions = sort_versions(version_list)
- versions = [(version_obj, comparable)
- for version_obj, comparable in versions
- if not comparable.is_prerelease]
+ versions = [
+ (version_obj, comparable)
+ for version_obj, comparable in versions
+ if not comparable.is_prerelease
+ ]
if versions:
# We take preference for tags over branches. If we don't find any tag,
diff --git a/readthedocs/projects/views/base.py b/readthedocs/projects/views/base.py
index 2bdb17fdfb2..a29d65f663d 100644
--- a/readthedocs/projects/views/base.py
+++ b/readthedocs/projects/views/base.py
@@ -1,33 +1,31 @@
# -*- coding: utf-8 -*-
-"""Mix-in classes for project views."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Mix-in classes for project views."""
import logging
-from builtins import object
from datetime import timedelta
from django.conf import settings
-from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
+from django.urls import reverse
from django.utils import timezone
from ..exceptions import ProjectSpamError
from ..models import Project
+
log = logging.getLogger(__name__)
USER_MATURITY_DAYS = getattr(settings, 'USER_MATURITY_DAYS', 7)
-class ProjectOnboardMixin(object):
+class ProjectOnboardMixin:
"""Add project onboard context data to project object views."""
def get_context_data(self, **kwargs):
"""Add onboard context data."""
- context = super(ProjectOnboardMixin, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
# If more than 1 project, don't show onboarding at all. This could
# change in the future, to onboard each user maybe?
if Project.objects.for_admin_user(self.request.user).count() > 1:
@@ -51,7 +49,7 @@ def get_context_data(self, **kwargs):
# Mixins
-class ProjectAdminMixin(object):
+class ProjectAdminMixin:
"""
Mixin class that provides project sublevel objects.
@@ -74,11 +72,12 @@ def get_project(self):
return None
return get_object_or_404(
Project.objects.for_admin_user(user=self.request.user),
- slug=self.kwargs[self.project_url_field])
+ slug=self.kwargs[self.project_url_field],
+ )
def get_context_data(self, **kwargs):
"""Add project to context data."""
- context = super(ProjectAdminMixin, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['project'] = self.get_project()
return context
@@ -88,7 +87,7 @@ def get_form(self, data=None, files=None, **kwargs):
return self.form_class(data, files, **kwargs)
-class ProjectSpamMixin(object):
+class ProjectSpamMixin:
"""Protects POST views from spammers."""
@@ -100,7 +99,7 @@ def post(self, request, *args, **kwargs):
)
return HttpResponseRedirect(self.get_failure_url())
try:
- return super(ProjectSpamMixin, self).post(request, *args, **kwargs)
+ return super().post(request, *args, **kwargs)
except ProjectSpamError:
date_maturity = timezone.now() - timedelta(days=USER_MATURITY_DAYS)
if request.user.date_joined > date_maturity:
diff --git a/readthedocs/projects/views/mixins.py b/readthedocs/projects/views/mixins.py
index 50e03beb475..670caa21f83 100644
--- a/readthedocs/projects/views/mixins.py
+++ b/readthedocs/projects/views/mixins.py
@@ -1,13 +1,13 @@
+# -*- coding: utf-8 -*-
+
"""Mixin classes for project views."""
-from __future__ import absolute_import
-from builtins import object
from django.shortcuts import get_object_or_404
from readthedocs.projects.models import Project
-class ProjectRelationMixin(object):
+class ProjectRelationMixin:
"""
Mixin class for constructing model views for project dashboard.
@@ -32,7 +32,7 @@ def get_project(self):
return None
return get_object_or_404(
self.get_project_queryset(),
- slug=self.kwargs[self.project_lookup_url_kwarg]
+ slug=self.kwargs[self.project_lookup_url_kwarg],
)
def get_queryset(self):
@@ -41,6 +41,6 @@ def get_queryset(self):
)
def get_context_data(self, **kwargs):
- context = super(ProjectRelationMixin, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context[self.project_context_object_name] = self.get_project()
return context
diff --git a/readthedocs/projects/views/private.py b/readthedocs/projects/views/private.py
index 46a317613ad..02256089c81 100644
--- a/readthedocs/projects/views/private.py
+++ b/readthedocs/projects/views/private.py
@@ -2,13 +2,6 @@
"""Project views for authenticated users."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
from allauth.socialaccount.models import SocialAccount
@@ -67,8 +60,10 @@
)
from readthedocs.projects.signals import project_import
from readthedocs.projects.views.base import ProjectAdminMixin, ProjectSpamMixin
+
from ..tasks import retry_domain_verification
+
log = logging.getLogger(__name__)
@@ -87,7 +82,7 @@ def get_queryset(self):
return Project.objects.dashboard(self.request.user)
def get_context_data(self, **kwargs):
- context = super(ProjectDashboard, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
return context
@@ -227,7 +222,7 @@ def project_delete(request, project_slug):
broadcast(
type='app',
task=tasks.remove_dirs,
- args=[(project.doc_path,)]
+ args=[(project.doc_path,)],
)
project.delete()
messages.success(request, _('Project deleted'))
@@ -257,7 +252,7 @@ def get_form_kwargs(self, step=None):
def get_template_names(self):
"""Return template names based on step name."""
- return 'projects/import_{0}.html'.format(self.steps.current)
+ return 'projects/import_{}.html'.format(self.steps.current)
def done(self, form_list, **kwargs):
"""
@@ -358,7 +353,7 @@ def get(self, request, *args, **kwargs):
def get_form_data(self):
"""Get form data to post to import form."""
return {
- 'name': '{0}-demo'.format(self.request.user.username),
+ 'name': '{}-demo'.format(self.request.user.username),
'repo_type': 'git',
'repo': 'https://github.com/readthedocs/template.git',
}
@@ -412,7 +407,7 @@ def get(self, request, *args, **kwargs):
)
)), # yapf: disable
)
- return super(ImportView, self).get(request, *args, **kwargs)
+ return super().get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
initial_data = {}
@@ -426,7 +421,7 @@ def post(self, request, *args, **kwargs):
return self.wizard_class.as_view(initial_dict=initial_data)(request)
def get_context_data(self, **kwargs):
- context = super(ImportView, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['view_csrf_token'] = get_token(self.request)
context['has_connected_accounts'] = SocialAccount.objects.filter(
user=self.request.user,
@@ -447,10 +442,7 @@ def get_queryset(self):
def get_form(self, data=None, files=None, **kwargs):
kwargs['user'] = self.request.user
- return super(
- ProjectRelationshipMixin,
- self,
- ).get_form(data, files, **kwargs)
+ return super().get_form(data, files, **kwargs)
def form_valid(self, form):
broadcast(
@@ -458,7 +450,7 @@ def form_valid(self, form):
task=tasks.symlink_subproject,
args=[self.get_project().pk],
)
- return super(ProjectRelationshipMixin, self).form_valid(form)
+ return super().form_valid(form)
def get_success_url(self):
return reverse('projects_subprojects', args=[self.get_project().slug])
@@ -467,7 +459,7 @@ def get_success_url(self):
class ProjectRelationshipList(ProjectRelationshipMixin, ListView):
def get_context_data(self, **kwargs):
- ctx = super(ProjectRelationshipList, self).get_context_data(**kwargs)
+ ctx = super().get_context_data(**kwargs)
ctx['superproject'] = self.project.superprojects.first()
return ctx
@@ -733,8 +725,9 @@ def get_success_url(self):
class DomainList(DomainMixin, ListViewWithForm):
+
def get_context_data(self, **kwargs):
- ctx = super(DomainList, self).get_context_data(**kwargs)
+ ctx = super().get_context_data(**kwargs)
# Retry validation on all domains if applicable
for domain in ctx['domain_list']:
@@ -789,7 +782,7 @@ def get_success_url(self):
def get_template_names(self):
if self.template_name:
return self.template_name
- return 'projects/integration{0}.html'.format(self.template_name_suffix)
+ return 'projects/integration{}.html'.format(self.template_name_suffix)
class IntegrationList(IntegrationMixin, ListView):
@@ -824,7 +817,7 @@ def get_template_names(self):
integration_type = self.get_integration().integration_type
suffix = self.SUFFIX_MAP.get(integration_type, integration_type)
return (
- 'projects/integration_{0}{1}.html'
+ 'projects/integration_{}{}.html'
.format(suffix, self.template_name_suffix)
)
diff --git a/readthedocs/projects/views/public.py b/readthedocs/projects/views/public.py
index 90f5ef978b1..e90bbeb3e08 100644
--- a/readthedocs/projects/views/public.py
+++ b/readthedocs/projects/views/public.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""Public project views."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Public project views."""
import json
import logging
@@ -16,9 +14,9 @@
from django.contrib import messages
from django.contrib.auth.models import User
from django.core.cache import cache
-from django.urls import reverse
-from django.http import Http404, HttpResponse, HttpResponseRedirect
+from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
+from django.urls import reverse
from django.views.decorators.cache import never_cache
from django.views.generic import DetailView, ListView
from taggit.models import Tag
@@ -26,12 +24,13 @@
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.builds.views import BuildTriggerMixin
-from readthedocs.projects.models import ImportedFile, Project
+from readthedocs.projects.models import Project
from readthedocs.search.indexes import PageIndex
from readthedocs.search.views import LOG_TEMPLATE
from .base import ProjectOnboardMixin
+
log = logging.getLogger(__name__)
search_log = logging.getLogger(__name__ + '.search')
mimetypes.add_type('application/epub+zip', '.epub')
@@ -54,7 +53,9 @@ def get_queryset(self):
if self.kwargs.get('username'):
self.user = get_object_or_404(
- User, username=self.kwargs.get('username'))
+ User,
+ username=self.kwargs.get('username'),
+ )
queryset = queryset.filter(user=self.user)
else:
self.user = None
@@ -62,7 +63,7 @@ def get_queryset(self):
return queryset
def get_context_data(self, **kwargs):
- context = super(ProjectIndex, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
context['person'] = self.user
context['tag'] = self.tag
return context
@@ -82,11 +83,13 @@ def get_queryset(self):
return Project.objects.protected(self.request.user)
def get_context_data(self, **kwargs):
- context = super(ProjectDetailView, self).get_context_data(**kwargs)
+ context = super().get_context_data(**kwargs)
project = self.get_object()
context['versions'] = Version.objects.public(
- user=self.request.user, project=project)
+ user=self.request.user,
+ project=project,
+ )
protocol = 'http'
if self.request.is_secure():
@@ -94,7 +97,7 @@ def get_context_data(self, **kwargs):
version_slug = project.get_default_version()
- context['badge_url'] = '%s://%s%s?version=%s' % (
+ context['badge_url'] = '{}://{}{}?version={}'.format(
protocol,
settings.PRODUCTION_DOMAIN,
reverse('project_badge', args=[project.slug]),
@@ -112,8 +115,14 @@ def get_context_data(self, **kwargs):
def project_badge(request, project_slug):
"""Return a sweet badge for the project."""
style = request.GET.get('style', 'flat')
- if style not in ("flat", "plastic", "flat-square", "for-the-badge", "social"):
- style = "flat"
+ if style not in (
+ 'flat',
+ 'plastic',
+ 'flat-square',
+ 'for-the-badge',
+ 'social',
+ ):
+ style = 'flat'
# Get the local path to the badge files
badge_path = os.path.join(
@@ -129,10 +138,15 @@ def project_badge(request, project_slug):
file_path = badge_path % 'unknown'
version = Version.objects.public(request.user).filter(
- project__slug=project_slug, slug=version_slug).first()
+ project__slug=project_slug,
+ slug=version_slug,
+ ).first()
if version:
- last_build = version.builds.filter(type='html', state='finished').order_by('-date').first()
+ last_build = version.builds.filter(
+ type='html',
+ state='finished',
+ ).order_by('-date').first()
if last_build:
if last_build.success:
file_path = badge_path % 'passing'
@@ -146,14 +160,18 @@ def project_badge(request, project_slug):
content_type='image/svg+xml',
)
except (IOError, OSError):
- log.exception('Failed to read local filesystem while serving a docs badge')
+ log.exception(
+ 'Failed to read local filesystem while serving a docs badge',
+ )
return HttpResponse(status=503)
def project_downloads(request, project_slug):
"""A detail view for a project with various dataz."""
project = get_object_or_404(
- Project.objects.protected(request.user), slug=project_slug)
+ Project.objects.protected(request.user),
+ slug=project_slug,
+ )
versions = Version.objects.public(user=request.user, project=project)
version_data = OrderedDict()
for version in versions:
@@ -191,15 +209,21 @@ def project_download_media(request, project_slug, type_, version_slug):
privacy_level = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public')
if privacy_level == 'public' or settings.DEBUG:
path = os.path.join(
- settings.MEDIA_URL, type_, project_slug, version_slug,
- '%s.%s' % (project_slug, type_.replace('htmlzip', 'zip')))
+ settings.MEDIA_URL,
+ type_,
+ project_slug,
+ version_slug,
+ '{}.{}'.format(project_slug, type_.replace('htmlzip', 'zip')),
+ )
return HttpResponseRedirect(path)
# Get relative media path
path = (
version.project.get_production_media_path(
- type_=type_, version_slug=version_slug)
- .replace(settings.PRODUCTION_ROOT, '/prod_artifacts'))
+ type_=type_,
+ version_slug=version_slug,
+ ).replace(settings.PRODUCTION_ROOT, '/prod_artifacts')
+ )
content_type, encoding = mimetypes.guess_type(path)
content_type = content_type or 'application/octet-stream'
response = HttpResponse(content_type=content_type)
@@ -207,8 +231,11 @@ def project_download_media(request, project_slug, type_, version_slug):
response['Content-Encoding'] = encoding
response['X-Accel-Redirect'] = path
# Include version in filename; this fixes a long-standing bug
- filename = '%s-%s.%s' % (
- project_slug, version_slug, path.split('.')[-1])
+ filename = '{}-{}.{}'.format(
+ project_slug,
+ version_slug,
+ path.split('.')[-1],
+ )
response['Content-Disposition'] = 'filename=%s' % filename
return response
@@ -231,7 +258,8 @@ def elastic_project_search(request, project_slug):
version=version_slug or '',
language='',
msg=query or '',
- ))
+ ),
+ )
if query:
@@ -243,22 +271,22 @@ def elastic_project_search(request, project_slug):
{'match': {'title': {'query': query, 'boost': 10}}},
{'match': {'headers': {'query': query, 'boost': 5}}},
{'match': {'content': {'query': query}}},
- ]
- }
+ ],
+ },
},
'highlight': {
'fields': {
'title': {},
'headers': {},
'content': {},
- }
+ },
},
'fields': ['title', 'project', 'version', 'path'],
'filter': {
'and': [
{'term': {'project': project_slug}},
{'term': {'version': version_slug}},
- ]
+ ],
},
'size': 50, # TODO: Support pagination.
}
@@ -295,10 +323,15 @@ def project_versions(request, project_slug):
Shows the available versions and lets the user choose which ones to build.
"""
project = get_object_or_404(
- Project.objects.protected(request.user), slug=project_slug)
+ Project.objects.protected(request.user),
+ slug=project_slug,
+ )
versions = Version.objects.public(
- user=request.user, project=project, only_active=False)
+ user=request.user,
+ project=project,
+ only_active=False,
+ )
active_versions = versions.filter(active=True)
inactive_versions = versions.filter(active=False)
@@ -324,7 +357,9 @@ def project_versions(request, project_slug):
def project_analytics(request, project_slug):
"""Have a analytics API placeholder."""
project = get_object_or_404(
- Project.objects.protected(request.user), slug=project_slug)
+ Project.objects.protected(request.user),
+ slug=project_slug,
+ )
analytics_cache = cache.get('analytics:%s' % project_slug)
if analytics_cache:
analytics = json.loads(analytics_cache)
@@ -332,8 +367,10 @@ def project_analytics(request, project_slug):
try:
resp = requests.get(
'{host}/api/v1/index/1/heatmap/'.format(
- host=settings.GROK_API_HOST),
- params={'project': project.slug, 'days': 7, 'compare': True})
+ host=settings.GROK_API_HOST,
+ ),
+ params={'project': project.slug, 'days': 7, 'compare': True},
+ )
analytics = resp.json()
cache.set('analytics:%s' % project_slug, resp.content, 1800)
except requests.exceptions.RequestException:
@@ -344,12 +381,18 @@ def project_analytics(request, project_slug):
reversed(
sorted(
list(analytics['page'].items()),
- key=operator.itemgetter(1))))
+ key=operator.itemgetter(1),
+ ),
+ ),
+ )
version_list = list(
reversed(
sorted(
list(analytics['version'].items()),
- key=operator.itemgetter(1))))
+ key=operator.itemgetter(1),
+ ),
+ ),
+ )
else:
page_list = []
version_list = []
@@ -375,9 +418,13 @@ def project_analytics(request, project_slug):
def project_embed(request, project_slug):
"""Have a content API placeholder."""
project = get_object_or_404(
- Project.objects.protected(request.user), slug=project_slug)
+ Project.objects.protected(request.user),
+ slug=project_slug,
+ )
version = project.versions.get(slug=LATEST)
- files = version.imported_files.filter(name__endswith='.html').order_by('path')
+ files = version.imported_files.filter(
+ name__endswith='.html',
+ ).order_by('path')
return render(
request,
diff --git a/readthedocs/redirects/admin.py b/readthedocs/redirects/admin.py
index 6bd2d73470d..4ce0239d48f 100644
--- a/readthedocs/redirects/admin.py
+++ b/readthedocs/redirects/admin.py
@@ -1,8 +1,9 @@
-"""Django admin configuration for the redirects app."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Django admin configuration for the redirects app."""
from django.contrib import admin
+
from .models import Redirect
diff --git a/readthedocs/redirects/managers.py b/readthedocs/redirects/managers.py
index 9c0f1bf47fa..37e10890cc9 100644
--- a/readthedocs/redirects/managers.py
+++ b/readthedocs/redirects/managers.py
@@ -1,15 +1,20 @@
+# -*- coding: utf-8 -*-
+
"""Manager and queryset for the redirects app."""
-from __future__ import absolute_import
from django.db.models import Manager
from django.db.models.query import QuerySet
class RedirectQuerySet(QuerySet):
+
def get_redirect_path(self, path, language=None, version_slug=None):
for redirect in self.select_related('project'):
new_path = redirect.get_redirect_path(
- path=path, language=language, version_slug=version_slug)
+ path=path,
+ language=language,
+ version_slug=version_slug,
+ )
if new_path:
return new_path
diff --git a/readthedocs/redirects/migrations/0001_initial.py b/readthedocs/redirects/migrations/0001_initial.py
index 010f36342b3..0bb2fb946eb 100644
--- a/readthedocs/redirects/migrations/0001_initial.py
+++ b/readthedocs/redirects/migrations/0001_initial.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from __future__ import absolute_import
-from django.db import models, migrations
+from django.db import migrations, models
class Migration(migrations.Migration):
diff --git a/readthedocs/redirects/migrations/0002_add_missing_model_change_migrations.py b/readthedocs/redirects/migrations/0002_add_missing_model_change_migrations.py
index a837e6fb146..e1d83010c0a 100644
--- a/readthedocs/redirects/migrations/0002_add_missing_model_change_migrations.py
+++ b/readthedocs/redirects/migrations/0002_add_missing_model_change_migrations.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-31 11:25
-from __future__ import unicode_literals
-
from django.db import migrations, models
diff --git a/readthedocs/redirects/models.py b/readthedocs/redirects/models.py
index cbd080ca28c..7945ecb5a82 100644
--- a/readthedocs/redirects/models.py
+++ b/readthedocs/redirects/models.py
@@ -1,22 +1,23 @@
+# -*- coding: utf-8 -*-
+
"""Django models for the redirects app."""
-from __future__ import absolute_import
-from builtins import object
+import logging
+import re
+
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
-import logging
-import re
from readthedocs.core.resolver import resolve_path
from readthedocs.projects.models import Project
+
from .managers import RedirectManager
log = logging.getLogger(__name__)
-
HTTP_STATUS_CHOICES = (
(301, _('301 - Permanent Redirect')),
(302, _('302 - Temporary Redirect')),
@@ -40,12 +41,14 @@
# make sense for "Prefix Redirects" since the from URL is considered after the
# ``/$lang/$version/`` part. Also, there is a feature for the "Exact
# Redirects" that should be mentioned here: the usage of ``$rest``
-from_url_helptext = _('Absolute path, excluding the domain. '
- 'Example: /docs/ or /install.html'
- )
-to_url_helptext = _('Absolute or relative URL. Example: '
- '/tutorial/install.html'
- )
+from_url_helptext = _(
+ 'Absolute path, excluding the domain. '
+ 'Example: /docs/ or /install.html',
+)
+to_url_helptext = _(
+ 'Absolute or relative URL. Example: '
+ '/tutorial/install.html',
+)
redirect_type_helptext = _('The type of redirect you wish to use.')
@@ -54,21 +57,40 @@ class Redirect(models.Model):
"""A HTTP redirect associated with a Project."""
- project = models.ForeignKey(Project, verbose_name=_('Project'),
- related_name='redirects')
-
- redirect_type = models.CharField(_('Redirect Type'), max_length=255, choices=TYPE_CHOICES,
- help_text=redirect_type_helptext)
-
- from_url = models.CharField(_('From URL'), max_length=255,
- db_index=True, help_text=from_url_helptext, blank=True)
-
- to_url = models.CharField(_('To URL'), max_length=255,
- db_index=True, help_text=to_url_helptext, blank=True)
-
- http_status = models.SmallIntegerField(_('HTTP Status'),
- choices=HTTP_STATUS_CHOICES,
- default=301)
+ project = models.ForeignKey(
+ Project,
+ verbose_name=_('Project'),
+ related_name='redirects',
+ )
+
+ redirect_type = models.CharField(
+ _('Redirect Type'),
+ max_length=255,
+ choices=TYPE_CHOICES,
+ help_text=redirect_type_helptext,
+ )
+
+ from_url = models.CharField(
+ _('From URL'),
+ max_length=255,
+ db_index=True,
+ help_text=from_url_helptext,
+ blank=True,
+ )
+
+ to_url = models.CharField(
+ _('To URL'),
+ max_length=255,
+ db_index=True,
+ help_text=to_url_helptext,
+ blank=True,
+ )
+
+ http_status = models.SmallIntegerField(
+ _('HTTP Status'),
+ choices=HTTP_STATUS_CHOICES,
+ default=301,
+ )
status = models.BooleanField(choices=STATUS_CHOICES, default=True)
create_dt = models.DateTimeField(auto_now_add=True)
@@ -76,7 +98,7 @@ class Redirect(models.Model):
objects = RedirectManager()
- class Meta(object):
+ class Meta:
verbose_name = _('redirect')
verbose_name_plural = _('redirects')
ordering = ('-update_dt',)
@@ -86,10 +108,12 @@ def __str__(self):
if self.redirect_type in ['prefix', 'page', 'exact']:
return redirect_text.format(
type=self.get_redirect_type_display(),
- from_to_url=self.get_from_to_url_display()
+ from_to_url=self.get_from_to_url_display(),
)
- return ugettext('Redirect: {}'.format(
- self.get_redirect_type_display())
+ return ugettext(
+ 'Redirect: {}'.format(
+ self.get_redirect_type_display(),
+ ),
)
def get_from_to_url_display(self):
@@ -99,11 +123,11 @@ def get_from_to_url_display(self):
if self.redirect_type == 'prefix':
to_url = '/{lang}/{version}/'.format(
lang=self.project.language,
- version=self.project.default_version
+ version=self.project.default_version,
)
return '{from_url} -> {to_url}'.format(
from_url=from_url,
- to_url=to_url
+ to_url=to_url,
)
return ''
@@ -119,13 +143,19 @@ def get_full_path(self, filename, language=None, version_slug=None):
return filename
return resolve_path(
- project=self.project, language=language,
- version_slug=version_slug, filename=filename
+ project=self.project,
+ language=language,
+ version_slug=version_slug,
+ filename=filename,
)
def get_redirect_path(self, path, language=None, version_slug=None):
- method = getattr(self, 'redirect_{type}'.format(
- type=self.redirect_type))
+ method = getattr(
+ self,
+ 'redirect_{type}'.format(
+ type=self.redirect_type,
+ ),
+ )
return method(path, language=language, version_slug=version_slug)
def redirect_prefix(self, path, language=None, version_slug=None):
@@ -135,7 +165,8 @@ def redirect_prefix(self, path, language=None, version_slug=None):
to = self.get_full_path(
filename=cut_path,
language=language,
- version_slug=version_slug)
+ version_slug=version_slug,
+ )
return to
def redirect_page(self, path, language=None, version_slug=None):
@@ -144,7 +175,8 @@ def redirect_page(self, path, language=None, version_slug=None):
to = self.get_full_path(
filename=self.to_url.lstrip('/'),
language=language,
- version_slug=version_slug)
+ version_slug=version_slug,
+ )
return to
def redirect_exact(self, path, language=None, version_slug=None):
@@ -171,7 +203,8 @@ def redirect_sphinx_html(self, path, language=None, version_slug=None):
return self.get_full_path(
filename=to,
language=language,
- version_slug=version_slug)
+ version_slug=version_slug,
+ )
def redirect_sphinx_htmldir(self, path, language=None, version_slug=None):
if path.endswith('.html'):
@@ -181,4 +214,5 @@ def redirect_sphinx_htmldir(self, path, language=None, version_slug=None):
return self.get_full_path(
filename=to,
language=language,
- version_slug=version_slug)
+ version_slug=version_slug,
+ )
diff --git a/readthedocs/redirects/utils.py b/readthedocs/redirects/utils.py
index 1edc628626a..ce1d7514083 100644
--- a/readthedocs/redirects/utils.py
+++ b/readthedocs/redirects/utils.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Redirection view support.
@@ -7,11 +9,11 @@
These are not used directly as views; they are instead included into 404
handlers, so that redirects only take effect if no other view matches.
"""
-from __future__ import absolute_import
-from django.http import HttpResponseRedirect
import logging
import re
+from django.http import HttpResponseRedirect
+
from readthedocs.constants import LANGUAGES_REGEX
from readthedocs.projects.models import Project
@@ -37,7 +39,8 @@ def project_and_path_from_request(request, path):
# docs prefix.
match = re.match(
r'^/docs/(?P<project_slug>[^/]+)(?P<path>/.*)$',
- path)
+ path,
+ )
if match:
project_slug = match.groupdict()['project_slug']
path = match.groupdict()['path']
@@ -56,7 +59,8 @@ def project_and_path_from_request(request, path):
def language_and_version_from_path(path):
match = re.match(
r'^/(?P<language>%s)/(?P<version_slug>[^/]+)(?P<path>/.*)$' % LANGUAGES_REGEX,
- path)
+ path,
+ )
if match:
language = match.groupdict()['language']
version_slug = match.groupdict()['version_slug']
@@ -76,7 +80,10 @@ def get_redirect_response(request, path):
language, version_slug, path = language_and_version_from_path(path)
new_path = project.redirects.get_redirect_path(
- path=path, language=language, version_slug=version_slug)
+ path=path,
+ language=language,
+ version_slug=version_slug,
+ )
if new_path is None:
return None
diff --git a/readthedocs/restapi/client.py b/readthedocs/restapi/client.py
index 83f5b861d83..53428b707fd 100644
--- a/readthedocs/restapi/client.py
+++ b/readthedocs/restapi/client.py
@@ -2,13 +2,6 @@
"""Simple client to access our API with Slumber credentials."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
import requests
@@ -18,6 +11,7 @@
from rest_framework.renderers import JSONRenderer
from slumber import API, serialize
+
log = logging.getLogger(__name__)
PRODUCTION_DOMAIN = getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org')
@@ -28,7 +22,7 @@
class DrfJsonSerializer(serialize.JsonSerializer):
- """Additional serialization help from the DRF renderer"""
+ """Additional serialization help from the DRF renderer."""
key = 'json-drf'
diff --git a/readthedocs/restapi/permissions.py b/readthedocs/restapi/permissions.py
index 615872d307e..93d4695a7cc 100644
--- a/readthedocs/restapi/permissions.py
+++ b/readthedocs/restapi/permissions.py
@@ -1,6 +1,6 @@
-"""Defines access permissions for the API."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Defines access permissions for the API."""
from rest_framework import permissions
@@ -51,12 +51,15 @@ class APIPermission(permissions.IsAuthenticatedOrReadOnly):
"""
def has_permission(self, request, view):
- has_perm = super(APIPermission, self).has_permission(request, view)
+ has_perm = super().has_permission(request, view)
return has_perm or (request.user and request.user.is_staff)
def has_object_permission(self, request, view, obj):
- has_perm = super(APIPermission, self).has_object_permission(
- request, view, obj)
+ has_perm = super().has_object_permission(
+ request,
+ view,
+ obj,
+ )
return has_perm or (request.user and request.user.is_staff)
diff --git a/readthedocs/restapi/serializers.py b/readthedocs/restapi/serializers.py
index 264955948b1..380f2209368 100644
--- a/readthedocs/restapi/serializers.py
+++ b/readthedocs/restapi/serializers.py
@@ -1,27 +1,31 @@
-"""Defines serializers for each of our models."""
-
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
-from builtins import object
+"""Defines serializers for each of our models."""
from allauth.socialaccount.models import SocialAccount
from rest_framework import serializers
from readthedocs.builds.models import Build, BuildCommandResult, Version
from readthedocs.oauth.models import RemoteOrganization, RemoteRepository
-from readthedocs.projects.models import Project, Domain
+from readthedocs.projects.models import Domain, Project
class ProjectSerializer(serializers.ModelSerializer):
canonical_url = serializers.ReadOnlyField(source='get_docs_url')
- class Meta(object):
+ class Meta:
model = Project
fields = (
'id',
- 'name', 'slug', 'description', 'language',
- 'programming_language', 'repo', 'repo_type',
- 'default_version', 'default_branch',
+ 'name',
+ 'slug',
+ 'description',
+ 'language',
+ 'programming_language',
+ 'repo',
+ 'repo_type',
+ 'default_version',
+ 'default_branch',
'documentation_type',
'users',
'canonical_url',
@@ -76,13 +80,16 @@ class VersionSerializer(serializers.ModelSerializer):
project = ProjectSerializer()
downloads = serializers.DictField(source='get_downloads', read_only=True)
- class Meta(object):
+ class Meta:
model = Version
fields = (
'id',
- 'project', 'slug',
- 'identifier', 'verbose_name',
- 'active', 'built',
+ 'project',
+ 'slug',
+ 'identifier',
+ 'verbose_name',
+ 'active',
+ 'built',
'downloads',
'type',
)
@@ -99,7 +106,7 @@ class BuildCommandSerializer(serializers.ModelSerializer):
run_time = serializers.ReadOnlyField()
- class Meta(object):
+ class Meta:
model = BuildCommandResult
exclude = ('')
@@ -117,7 +124,7 @@ class BuildSerializer(serializers.ModelSerializer):
# https://github.com/dmkoch/django-jsonfield/issues/188#issuecomment-300439829
config = serializers.JSONField(required=False)
- class Meta(object):
+ class Meta:
model = Build
# `_config` should be excluded to avoid conflicts with `config`
exclude = ('builder', '_config')
@@ -142,7 +149,7 @@ class SearchIndexSerializer(serializers.Serializer):
class DomainSerializer(serializers.ModelSerializer):
project = ProjectSerializer()
- class Meta(object):
+ class Meta:
model = Domain
fields = (
'id',
@@ -156,7 +163,7 @@ class Meta(object):
class RemoteOrganizationSerializer(serializers.ModelSerializer):
- class Meta(object):
+ class Meta:
model = RemoteOrganization
exclude = ('json', 'email', 'users')
@@ -168,7 +175,7 @@ class RemoteRepositorySerializer(serializers.ModelSerializer):
organization = RemoteOrganizationSerializer()
matches = serializers.SerializerMethodField()
- class Meta(object):
+ class Meta:
model = RemoteRepository
exclude = ('json', 'users')
@@ -190,13 +197,12 @@ class SocialAccountSerializer(serializers.ModelSerializer):
avatar_url = serializers.URLField(source='get_avatar_url')
provider = ProviderSerializer(source='get_provider')
- class Meta(object):
+ class Meta:
model = SocialAccount
exclude = ('extra_data',)
def get_username(self, obj):
return (
- obj.extra_data.get('username') or
- obj.extra_data.get('login')
+ obj.extra_data.get('username') or obj.extra_data.get('login')
# FIXME: which one is GitLab?
)
diff --git a/readthedocs/restapi/signals.py b/readthedocs/restapi/signals.py
index 6b6d0b3955f..65509fc551d 100644
--- a/readthedocs/restapi/signals.py
+++ b/readthedocs/restapi/signals.py
@@ -1,9 +1,10 @@
-"""We define custom Django signals to trigger when a footer is rendered."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""We define custom Django signals to trigger when a footer is rendered."""
import django.dispatch
+
footer_response = django.dispatch.Signal(
- providing_args=["request", "context", "response_data"]
+ providing_args=['request', 'context', 'response_data'],
)
diff --git a/readthedocs/restapi/urls.py b/readthedocs/restapi/urls.py
index c8cdf6cd21e..7a0d5c54e2d 100644
--- a/readthedocs/restapi/urls.py
+++ b/readthedocs/restapi/urls.py
@@ -2,13 +2,6 @@
"""Define routes between URL paths and views/endpoints."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
from django.conf import settings
from django.conf.urls import include, url
from rest_framework import routers
@@ -35,6 +28,7 @@
VersionViewSet,
)
+
router = routers.DefaultRouter()
router.register(r'build', BuildViewSet, basename='build')
router.register(r'command', BuildCommandViewSet, basename='buildcommandresult')
@@ -101,26 +95,30 @@
integration_urls = [
url(
- r'webhook/github/(?P<project_slug>{project_slug})/$'
- .format(**pattern_opts),
+ r'webhook/github/(?P<project_slug>{project_slug})/$'.format(
+ **pattern_opts
+ ),
integrations.GitHubWebhookView.as_view(),
name='api_webhook_github',
),
url(
- r'webhook/gitlab/(?P<project_slug>{project_slug})/$'
- .format(**pattern_opts),
+ r'webhook/gitlab/(?P<project_slug>{project_slug})/$'.format(
+ **pattern_opts
+ ),
integrations.GitLabWebhookView.as_view(),
name='api_webhook_gitlab',
),
url(
- r'webhook/bitbucket/(?P<project_slug>{project_slug})/$'
- .format(**pattern_opts),
+ r'webhook/bitbucket/(?P<project_slug>{project_slug})/$'.format(
+ **pattern_opts
+ ),
integrations.BitbucketWebhookView.as_view(),
name='api_webhook_bitbucket',
),
url(
- r'webhook/generic/(?P<project_slug>{project_slug})/$'
- .format(**pattern_opts),
+ r'webhook/generic/(?P<project_slug>{project_slug})/$'.format(
+ **pattern_opts
+ ),
integrations.APIWebhookView.as_view(),
name='api_webhook_generic',
),
diff --git a/readthedocs/restapi/utils.py b/readthedocs/restapi/utils.py
index 8637cd1779b..69c5dcfbc7c 100644
--- a/readthedocs/restapi/utils.py
+++ b/readthedocs/restapi/utils.py
@@ -1,12 +1,6 @@
# -*- coding: utf-8 -*-
-"""Utility functions that are used by both views and celery tasks."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Utility functions that are used by both views and celery tasks."""
import hashlib
import logging
@@ -25,13 +19,15 @@
from readthedocs.builds.models import Version
from readthedocs.search.indexes import PageIndex, ProjectIndex, SectionIndex
+
log = logging.getLogger(__name__)
def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
"""Update the database with the current versions from the repository."""
old_version_values = project.versions.filter(type=type).values_list(
- 'verbose_name', 'identifier'
+ 'verbose_name',
+ 'identifier',
)
old_versions = dict(old_version_values)
@@ -49,7 +45,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
slug=STABLE,
version_id=version_id,
verbose_name=version_name,
- type_=type
+ type_=type,
)
if created:
added.add(created_version.slug)
@@ -60,7 +56,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
slug=LATEST,
version_id=version_id,
verbose_name=version_name,
- type_=type
+ type_=type,
)
if created:
added.add(created_version.slug)
@@ -71,11 +67,13 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
else:
# Update slug with new identifier
Version.objects.filter(
- project=project, verbose_name=version_name).update(
- identifier=version_id,
- type=type,
- machine=False,
- ) # noqa
+ project=project,
+ verbose_name=version_name,
+ ).update(
+ identifier=version_id,
+ type=type,
+ machine=False,
+ ) # noqa
log.info(
'(Sync Versions) Updated Version: [%s=%s] ',
@@ -93,9 +91,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
added.add(created_version.slug)
if not has_user_stable:
stable_version = (
- project.versions
- .filter(slug=STABLE, type=type)
- .first()
+ project.versions.filter(slug=STABLE, type=type).first()
)
if stable_version:
# Put back the RTD's stable version
@@ -103,9 +99,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
stable_version.save()
if not has_user_latest:
latest_version = (
- project.versions
- .filter(slug=LATEST, type=type)
- .first()
+ project.versions.filter(slug=LATEST, type=type).first()
)
if latest_version:
# Put back the RTD's latest version
@@ -120,11 +114,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin
def set_or_create_version(project, slug, version_id, verbose_name, type_):
"""Search or create a version and set its machine attribute to false."""
- version = (
- project.versions
- .filter(slug=slug)
- .first()
- )
+ version = (project.versions.filter(slug=slug).first())
if version:
version.identifier = version_id
version.machine = False
@@ -146,12 +136,10 @@ def delete_versions(project, version_data):
# We use verbose_name for tags
# because several tags can point to the same identifier.
versions_tags = [
- version['verbose_name']
- for version in version_data.get('tags', [])
+ version['verbose_name'] for version in version_data.get('tags', [])
]
versions_branches = [
- version['identifier']
- for version in version_data.get('branches', [])
+ version['identifier'] for version in version_data.get('branches', [])
]
to_delete_qs = project.versions.all()
to_delete_qs = to_delete_qs.exclude(
@@ -175,8 +163,14 @@ def delete_versions(project, version_data):
def index_search_request(
- version, page_list, commit, project_scale, page_scale, section=True,
- delete=True):
+ version,
+ page_list,
+ commit,
+ project_scale,
+ page_scale,
+ section=True,
+ delete=True,
+):
"""
Update search indexes with build output JSON.
@@ -206,7 +200,8 @@ def index_search_request(
'url': project.get_absolute_url(),
'tags': None,
'weight': project_scale,
- })
+ },
+ )
page_obj = PageIndex()
section_obj = SectionIndex()
diff --git a/readthedocs/restapi/views/core_views.py b/readthedocs/restapi/views/core_views.py
index 08fc9e7d764..1e4afdd8a7b 100644
--- a/readthedocs/restapi/views/core_views.py
+++ b/readthedocs/restapi/views/core_views.py
@@ -1,17 +1,16 @@
-"""Utility endpoints relating to canonical urls, embedded content, etc."""
+# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+"""Utility endpoints relating to canonical urls, embedded content, etc."""
+from django.shortcuts import get_object_or_404
from rest_framework import decorators, permissions, status
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
-from django.shortcuts import get_object_or_404
-
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
-from readthedocs.projects.models import Project
from readthedocs.core.templatetags.core_tags import make_document_url
+from readthedocs.projects.models import Project
@decorators.api_view(['GET'])
@@ -24,18 +23,26 @@ def docurl(request):
Example::
GET https://readthedocs.org/api/v2/docurl/?project=requests&version=latest&doc=index
-
"""
project = request.GET.get('project')
version = request.GET.get('version', LATEST)
doc = request.GET.get('doc', 'index')
if project is None:
- return Response({'error': 'Need project and doc'}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {'error': 'Need project and doc'},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
project = get_object_or_404(Project, slug=project)
version = get_object_or_404(
- Version.objects.public(request.user, project=project, only_active=False),
- slug=version)
+ Version.objects
+ .public(request.user, project=project, only_active=False),
+ slug=version,
+ )
return Response({
- 'url': make_document_url(project=project, version=version.slug, page=doc)
+ 'url': make_document_url(
+ project=project,
+ version=version.slug,
+ page=doc,
+ ),
})
diff --git a/readthedocs/restapi/views/footer_views.py b/readthedocs/restapi/views/footer_views.py
index f09ceaff527..bf66f5f80b2 100644
--- a/readthedocs/restapi/views/footer_views.py
+++ b/readthedocs/restapi/views/footer_views.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
-"""Endpoint to generate footer HTML."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Endpoint to generate footer HTML."""
-import six
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.template import loader as template_loader
@@ -17,7 +14,9 @@
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project
from readthedocs.projects.version_handling import (
- highest_version, parse_version_failsafe)
+ highest_version,
+ parse_version_failsafe,
+)
from readthedocs.restapi.signals import footer_response
@@ -35,10 +34,11 @@ def get_version_compare_data(project, base_version=None):
versions_qs = versions_qs.filter(type=TAG)
highest_version_obj, highest_version_comparable = highest_version(
- versions_qs)
+ versions_qs,
+ )
ret_val = {
- 'project': six.text_type(highest_version_obj),
- 'version': six.text_type(highest_version_comparable),
+ 'project': str(highest_version_obj),
+ 'version': str(highest_version_comparable),
'is_highest': True,
}
if highest_version_obj:
@@ -47,12 +47,14 @@ def get_version_compare_data(project, base_version=None):
if base_version and base_version.slug != LATEST:
try:
base_version_comparable = parse_version_failsafe(
- base_version.verbose_name)
+ base_version.verbose_name,
+ )
if base_version_comparable:
# This is only place where is_highest can get set. All error
# cases will be set to True, for non- standard versions.
ret_val['is_highest'] = (
- base_version_comparable >= highest_version_comparable)
+ base_version_comparable >= highest_version_comparable
+ )
else:
ret_val['is_highest'] = True
except (Version.DoesNotExist, TypeError):
@@ -84,13 +86,19 @@ def footer_html(request):
project = get_object_or_404(Project, slug=project_slug)
version = get_object_or_404(
Version.objects.public(
- request.user, project=project, only_active=False),
- slug__iexact=version_slug)
+ request.user,
+ project=project,
+ only_active=False,
+ ),
+ slug__iexact=version_slug,
+ )
main_project = project.main_language_project or project
if page_slug and page_slug != 'index':
- if (main_project.documentation_type == 'sphinx_htmldir' or
- main_project.documentation_type == 'mkdocs'):
+ if (
+ main_project.documentation_type == 'sphinx_htmldir' or
+ main_project.documentation_type == 'mkdocs'
+ ):
path = page_slug + '/'
elif main_project.documentation_type == 'sphinx_singlehtml':
path = 'index.html#document-' + page_slug
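For reference, a framework-free sketch of the page-path branching that the reflowed conditional above covers; only the two branches visible in this hunk are included, and everything else is outside the lines shown:

    # Illustration of the documentation_type branching shown above.
    def footer_page_path(documentation_type, page_slug):
        if documentation_type in ('sphinx_htmldir', 'mkdocs'):
            return page_slug + '/'
        if documentation_type == 'sphinx_singlehtml':
            return 'index.html#document-' + page_slug
        # Other documentation types are handled outside this hunk.
        return None

    assert footer_page_path('mkdocs', 'install') == 'install/'
    assert footer_page_path('sphinx_singlehtml', 'install') == 'index.html#document-install'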
diff --git a/readthedocs/restapi/views/integrations.py b/readthedocs/restapi/views/integrations.py
index 9a27e2ef9af..7f44fb4f11a 100644
--- a/readthedocs/restapi/views/integrations.py
+++ b/readthedocs/restapi/views/integrations.py
@@ -1,18 +1,11 @@
# -*- coding: utf-8 -*-
-"""Endpoints integrating with Github, Bitbucket, and other webhooks."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Endpoints integrating with Github, Bitbucket, and other webhooks."""
import json
import logging
import re
-import six
from django.shortcuts import get_object_or_404
from rest_framework import permissions, status
from rest_framework.exceptions import NotFound, ParseError
@@ -30,6 +23,7 @@
from readthedocs.integrations.utils import normalize_request_payload
from readthedocs.projects.models import Project
+
log = logging.getLogger(__name__)
GITHUB_EVENT_HEADER = 'HTTP_X_GITHUB_EVENT'
@@ -43,7 +37,7 @@
BITBUCKET_PUSH = 'repo:push'
-class WebhookMixin(object):
+class WebhookMixin:
"""Base class for Webhook mixins."""
@@ -75,7 +69,7 @@ def get_project(self, **kwargs):
def finalize_response(self, req, *args, **kwargs):
"""If the project was set on POST, store an HTTP exchange."""
- resp = super(WebhookMixin, self).finalize_response(req, *args, **kwargs)
+ resp = super().finalize_response(req, *args, **kwargs)
if hasattr(self, 'project') and self.project:
HttpExchange.objects.from_exchange(
req,
@@ -133,12 +127,17 @@ def get_response_push(self, project, branches):
"""
to_build, not_building = build_branches(project, branches)
if not_building:
- log.info('Skipping project branches: project=%s branches=%s',
- project, branches)
+ log.info(
+ 'Skipping project branches: project=%s branches=%s',
+ project,
+ branches,
+ )
triggered = True if to_build else False
- return {'build_triggered': triggered,
- 'project': project.slug,
- 'versions': list(to_build)}
+ return {
+ 'build_triggered': triggered,
+ 'project': project.slug,
+ 'versions': list(to_build),
+ }
def sync_versions(self, project):
version = sync_versions(project)
@@ -180,7 +179,7 @@ def get_data(self):
return json.loads(self.request.data['payload'])
except (ValueError, KeyError):
pass
- return super(GitHubWebhookView, self).get_data()
+ return super().get_data()
def handle_webhook(self):
# Get event and trigger other webhook events
@@ -189,7 +188,7 @@ def handle_webhook(self):
Project,
project=self.project,
data=self.data,
- event=event
+ event=event,
)
# Handle push events and trigger builds
if event == GITHUB_PUSH:
@@ -245,7 +244,7 @@ def handle_webhook(self):
Project,
project=self.project,
data=self.request.data,
- event=event
+ event=event,
)
# Handle push events and trigger builds
if event in (GITLAB_PUSH, GITLAB_TAG_PUSH):
@@ -306,16 +305,16 @@ def handle_webhook(self):
"""
Handle BitBucket events for push.
- BitBucket doesn't have a separate event for creation/deletion,
- instead it sets the new attribute (null if it is a deletion)
- and the old attribute (null if it is a creation).
+ BitBucket doesn't have a separate event for creation/deletion, instead
+ it sets the new attribute (null if it is a deletion) and the old
+ attribute (null if it is a creation).
"""
event = self.request.META.get(BITBUCKET_EVENT_HEADER, BITBUCKET_PUSH)
webhook_bitbucket.send(
Project,
project=self.project,
data=self.request.data,
- event=event
+ event=event,
)
if event == BITBUCKET_PUSH:
try:
@@ -350,8 +349,7 @@ class IsAuthenticatedOrHasToken(permissions.IsAuthenticated):
"""
def has_permission(self, request, view):
- has_perm = (super(IsAuthenticatedOrHasToken, self)
- .has_permission(request, view))
+ has_perm = (super().has_permission(request, view))
return has_perm or 'token' in request.data
@@ -380,9 +378,11 @@ def get_project(self, **kwargs):
# If the user is not an admin of the project, fall back to token auth
if self.request.user.is_authenticated:
try:
- return (Project.objects
- .for_admin_user(self.request.user)
- .get(**kwargs))
+ return (
+ Project.objects.for_admin_user(
+ self.request.user,
+ ).get(**kwargs)
+ )
except Project.DoesNotExist:
pass
# Recheck project and integration relationship during token auth check
@@ -402,9 +402,9 @@ def handle_webhook(self):
try:
branches = self.request.data.get(
'branches',
- [self.project.get_default_branch()]
+ [self.project.get_default_branch()],
)
- if isinstance(branches, six.string_types):
+ if isinstance(branches, str):
branches = [branches]
return self.get_response_push(self.project, branches)
except TypeError:
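The integrations hunks above mostly swap Python 2 compatibility spellings for their Python 3 forms. A small, self-contained sketch of the same idioms; the class and variable names here are illustrative only:

    # Illustration of the py2 -> py3 idioms adopted above.
    class WebhookBase:
        def finalize_response(self, resp):
            return resp

    class Webhook(WebhookBase):  # no explicit `(object)` base needed on py3
        def finalize_response(self, resp):
            # super() no longer takes the class and instance explicitly.
            return super().finalize_response(resp)

    branches = 'master'
    if isinstance(branches, str):  # replaces isinstance(..., six.string_types)
        branches = [branches]
    assert Webhook().finalize_response(branches) == ['master']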
diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py
index 3e925031472..60a634c7b12 100644
--- a/readthedocs/restapi/views/model_views.py
+++ b/readthedocs/restapi/views/model_views.py
@@ -1,13 +1,10 @@
# -*- coding: utf-8 -*-
-"""Endpoints for listing Projects, Versions, Builds, etc."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Endpoints for listing Projects, Versions, Builds, etc."""
import logging
from allauth.socialaccount.models import SocialAccount
-from builtins import str
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from rest_framework import decorators, permissions, status, viewsets
@@ -25,12 +22,25 @@
from .. import utils as api_utils
from ..permissions import (
- APIPermission, APIRestrictedPermission, IsOwner, RelatedProjectIsOwner)
+ APIPermission,
+ APIRestrictedPermission,
+ IsOwner,
+ RelatedProjectIsOwner,
+)
from ..serializers import (
- BuildAdminSerializer, BuildCommandSerializer, BuildSerializer,
- DomainSerializer, ProjectAdminSerializer, ProjectSerializer,
- RemoteOrganizationSerializer, RemoteRepositorySerializer,
- SocialAccountSerializer, VersionAdminSerializer, VersionSerializer)
+ BuildAdminSerializer,
+ BuildCommandSerializer,
+ BuildSerializer,
+ DomainSerializer,
+ ProjectAdminSerializer,
+ ProjectSerializer,
+ RemoteOrganizationSerializer,
+ RemoteRepositorySerializer,
+ SocialAccountSerializer,
+ VersionAdminSerializer,
+ VersionSerializer,
+)
+
log = logging.getLogger(__name__)
@@ -52,7 +62,8 @@ def render(self, data, accepted_media_type=None, renderer_context=None):
if not response or response.exception:
return data.get('detail', '').encode(self.charset)
data = render_to_string(
- 'restapi/log.txt', {'build': data}
+ 'restapi/log.txt',
+ {'build': data},
)
return data.encode(self.charset)
@@ -69,8 +80,10 @@ class UserSelectViewSet(viewsets.ModelViewSet):
def get_serializer_class(self):
try:
- if (self.request.user.is_staff and
- self.admin_serializer_class is not None):
+ if (
+ self.request.user.is_staff and
+ self.admin_serializer_class is not None
+ ):
return self.admin_serializer_class
except AttributeError:
pass
@@ -104,7 +117,9 @@ def translations(self, *_, **__):
@decorators.action(detail=True)
def subprojects(self, request, **kwargs):
project = get_object_or_404(
- Project.objects.api(request.user), pk=kwargs['pk'])
+ Project.objects.api(request.user),
+ pk=kwargs['pk'],
+ )
rels = project.subprojects.all()
children = [rel.child for rel in rels]
return Response({
@@ -114,16 +129,23 @@ def subprojects(self, request, **kwargs):
@decorators.action(detail=True)
def active_versions(self, request, **kwargs):
project = get_object_or_404(
- Project.objects.api(request.user), pk=kwargs['pk'])
+ Project.objects.api(request.user),
+ pk=kwargs['pk'],
+ )
versions = project.versions.filter(active=True)
return Response({
'versions': VersionSerializer(versions, many=True).data,
})
- @decorators.action(detail=True, permission_classes=[permissions.IsAdminUser])
+ @decorators.action(
+ detail=True,
+ permission_classes=[permissions.IsAdminUser],
+ )
def token(self, request, **kwargs):
project = get_object_or_404(
- Project.objects.api(request.user), pk=kwargs['pk'])
+ Project.objects.api(request.user),
+ pk=kwargs['pk'],
+ )
token = GitHubService.get_token_for_project(project, force_local=True)
return Response({
'token': token,
@@ -132,13 +154,16 @@ def token(self, request, **kwargs):
@decorators.action(detail=True)
def canonical_url(self, request, **kwargs):
project = get_object_or_404(
- Project.objects.api(request.user), pk=kwargs['pk'])
+ Project.objects.api(request.user),
+ pk=kwargs['pk'],
+ )
return Response({
'url': project.get_docs_url(),
})
@decorators.action(
- detail=True, permission_classes=[permissions.IsAdminUser],
+ detail=True,
+ permission_classes=[permissions.IsAdminUser],
methods=['post'],
)
def sync_versions(self, request, **kwargs): # noqa: D205
@@ -150,7 +175,9 @@ def sync_versions(self, request, **kwargs): # noqa: D205
:returns: the identifiers for the versions that have been deleted.
"""
project = get_object_or_404(
- Project.objects.api(request.user), pk=kwargs['pk'])
+ Project.objects.api(request.user),
+ pk=kwargs['pk'],
+ )
# If the currently highest non-prerelease version is active, then make
# the new latest version active as well.
@@ -166,11 +193,17 @@ def sync_versions(self, request, **kwargs): # noqa: D205
added_versions = set()
if 'tags' in data:
ret_set = api_utils.sync_versions(
- project=project, versions=data['tags'], type=TAG)
+ project=project,
+ versions=data['tags'],
+ type=TAG,
+ )
added_versions.update(ret_set)
if 'branches' in data:
ret_set = api_utils.sync_versions(
- project=project, versions=data['branches'], type=BRANCH)
+ project=project,
+ versions=data['branches'],
+ type=BRANCH,
+ )
added_versions.update(ret_set)
deleted_versions = api_utils.delete_versions(project, data)
except Exception as e:
@@ -189,13 +222,16 @@ def sync_versions(self, request, **kwargs): # noqa: D205
'Triggering new stable build: {project}:{version}'.format(
project=project.slug,
version=new_stable.identifier,
- ))
+ ),
+ )
trigger_build(project=project, version=new_stable)
# Marking the tag that is considered the new stable version as
# active and building it if it was just added.
- if (activate_new_stable and
- promoted_version.slug in added_versions):
+ if (
+ activate_new_stable and
+ promoted_version.slug in added_versions
+ ):
promoted_version.active = True
promoted_version.save()
trigger_build(project=project, version=promoted_version)
@@ -213,8 +249,14 @@ class VersionViewSet(UserSelectViewSet):
serializer_class = VersionSerializer
admin_serializer_class = VersionAdminSerializer
model = Version
- filter_fields = ('active', 'project__slug',) # django-filter<2.0.0
- filterset_fields = ('active', 'project__slug',)
+ filter_fields = (
+ 'active',
+ 'project__slug',
+ ) # django-filter<2.0.0
+ filterset_fields = (
+ 'active',
+ 'project__slug',
+ )
class BuildViewSetBase(UserSelectViewSet):
@@ -269,7 +311,9 @@ def get_queryset(self):
self.model.objects.api(self.request.user).filter(
account__provider__in=[
service.adapter.provider_id for service in registry
- ]))
+ ],
+ )
+ )
class RemoteRepositoryViewSet(viewsets.ReadOnlyModelViewSet):
@@ -295,7 +339,8 @@ def get_queryset(self):
query = query.filter(
account__provider__in=[
service.adapter.provider_id for service in registry
- ])
+ ],
+ )
return query
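The `VersionViewSet` hunk above keeps both spellings of the filter attribute while django-filter 1.x and 2.x are supported. A plain-Python sketch of that compatibility shim; the mixin name is made up for illustration:

    # Illustration: the same filters exposed under both attribute names.
    VERSION_FILTERS = (
        'active',
        'project__slug',
    )

    class VersionFilterCompatMixin:
        filter_fields = VERSION_FILTERS     # read by django-filter < 2.0.0
        filterset_fields = VERSION_FILTERS  # read by django-filter >= 2.0.0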
diff --git a/readthedocs/restapi/views/search_views.py b/readthedocs/restapi/views/search_views.py
index abe36174097..aa2acc1fdbe 100644
--- a/readthedocs/restapi/views/search_views.py
+++ b/readthedocs/restapi/views/search_views.py
@@ -1,6 +1,7 @@
+# -*- coding: utf-8 -*-
+
"""Endpoints related to searching through projects, sections, etc."""
-from __future__ import absolute_import
import logging
from rest_framework import decorators, permissions, status
@@ -10,8 +11,8 @@
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project, ProjectRelationship
-from readthedocs.search.lib import search_file, search_project, search_section
from readthedocs.restapi import utils
+from readthedocs.search.lib import search_file, search_project, search_section
log = logging.getLogger(__name__)
@@ -31,8 +32,12 @@ def index_search(request):
page_scale = 1
utils.index_search_request(
- version=version, page_list=data['page_list'], commit=commit,
- project_scale=project_scale, page_scale=page_scale)
+ version=version,
+ page_list=data['page_list'],
+ commit=commit,
+ project_scale=project_scale,
+ page_scale=page_scale,
+ )
return Response({'indexed': True})
@@ -46,20 +51,30 @@ def search(request):
version_slug = request.GET.get('version', LATEST)
query = request.GET.get('q', None)
if project_slug is None or query is None:
- return Response({'error': 'Need project and q'},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {'error': 'Need project and q'},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
try:
project = Project.objects.get(slug=project_slug)
except Project.DoesNotExist:
- return Response({'error': 'Project not found'},
- status=status.HTTP_404_NOT_FOUND)
- log.debug("(API Search) %s", query)
- results = search_file(request=request, project_slug=project_slug,
- version_slug=version_slug, query=query)
+ return Response(
+ {'error': 'Project not found'},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ log.debug('(API Search) %s', query)
+ results = search_file(
+ request=request,
+ project_slug=project_slug,
+ version_slug=version_slug,
+ query=query,
+ )
if results is None:
- return Response({'error': 'Project not found'},
- status=status.HTTP_404_NOT_FOUND)
+ return Response(
+ {'error': 'Project not found'},
+ status=status.HTTP_404_NOT_FOUND,
+ )
# Supplement result paths with domain information on project
hits = results.get('hits', {}).get('hits', [])
@@ -73,13 +88,11 @@ def search(request):
try:
subproject = project.subprojects.get(child__slug=search_project)
canonical_url = subproject.child.get_docs_url(
- version_slug=search_version
+ version_slug=search_version,
)
except ProjectRelationship.DoesNotExist:
pass
- results['hits']['hits'][n]['fields']['link'] = (
- canonical_url + path
- )
+ results['hits']['hits'][n]['fields']['link'] = (canonical_url + path)
return Response({'results': results})
@@ -90,8 +103,11 @@ def search(request):
def project_search(request):
query = request.GET.get('q', None)
if query is None:
- return Response({'error': 'Need project and q'}, status=status.HTTP_400_BAD_REQUEST)
- log.debug("(API Project Search) %s", (query))
+ return Response(
+ {'error': 'Need project and q'},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+    log.debug('(API Project Search) %s', query)
results = search_project(request=request, query=query)
return Response({'results': results})
@@ -135,12 +151,17 @@ def section_search(request):
if not query:
return Response(
{'error': 'Search term required. Use the "q" GET arg to search. '},
- status=status.HTTP_400_BAD_REQUEST)
+ status=status.HTTP_400_BAD_REQUEST,
+ )
project_slug = request.GET.get('project', None)
version_slug = request.GET.get('version', LATEST)
path = request.GET.get('path', None)
- log.debug("(API Section Search) [%s:%s] %s", project_slug, version_slug,
- query)
+ log.debug(
+ '(API Section Search) [%s:%s] %s',
+ project_slug,
+ version_slug,
+ query,
+ )
results = search_section(
request=request,
query=query,
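The reformatted error handling in `search()` above keeps the same contract: HTTP 400 with 'Need project and q' when either parameter is missing, and HTTP 404 when the project or its results cannot be found. A framework-free sketch of just the parameter check:

    # Illustration of the parameter validation shown above.
    def validate_search_params(params):
        project_slug = params.get('project')
        query = params.get('q')
        if project_slug is None or query is None:
            return 400, {'error': 'Need project and q'}
        return 200, {'project': project_slug, 'q': query}

    assert validate_search_params({'q': 'install'})[0] == 400
    assert validate_search_params({'project': 'pip', 'q': 'install'})[0] == 200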
diff --git a/readthedocs/restapi/views/task_views.py b/readthedocs/restapi/views/task_views.py
index 475fb17bda6..8bf9d3843e4 100644
--- a/readthedocs/restapi/views/task_views.py
+++ b/readthedocs/restapi/views/task_views.py
@@ -1,11 +1,6 @@
-"""Endpoints relating to task/job status, etc."""
+# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+"""Endpoints relating to task/job status, etc."""
import logging
@@ -18,11 +13,14 @@
from readthedocs.core.utils.tasks import TaskNoPermission, get_public_task_data
from readthedocs.oauth import tasks
-log = logging.getLogger(__name__)
+log = logging.getLogger(__name__)
SUCCESS_STATES = ('SUCCESS',)
-FAILURE_STATES = ('FAILURE', 'REVOKED',)
+FAILURE_STATES = (
+ 'FAILURE',
+ 'REVOKED',
+)
FINISHED_STATES = SUCCESS_STATES + FAILURE_STATES
STARTED_STATES = ('RECEIVED', 'STARTED', 'RETRY') + FINISHED_STATES
@@ -48,24 +46,19 @@ def get_status_data(task_name, state, data, error=None):
def job_status(request, task_id):
try:
task_name, state, public_data, error = get_public_task_data(
- request, task_id
+ request,
+ task_id,
)
except (TaskNoPermission, ConnectionError):
- return Response(
- get_status_data('unknown', 'PENDING', {})
- )
- return Response(
- get_status_data(task_name, state, public_data, error)
- )
+        return Response(get_status_data('unknown', 'PENDING', {}))
+    return Response(get_status_data(task_name, state, public_data, error))
@decorators.api_view(['POST'])
@decorators.permission_classes((permissions.IsAuthenticated,))
@decorators.renderer_classes((JSONRenderer,))
def sync_remote_repositories(request):
- result = tasks.sync_remote_repositories.delay(
- user_id=request.user.id
- )
+    result = tasks.sync_remote_repositories.delay(user_id=request.user.id)
task_id = result.task_id
return Response({
'task_id': task_id,
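The state tuples above drive the started/finished/success flags that `get_status_data` returns (see the expectations in `test_get_status_data` further down this diff). A hedged sketch of that classification; the helper name is illustrative and the exact derivation is an assumption based on those test expectations:

    # Illustration: classifying a Celery state with the tuples defined above.
    SUCCESS_STATES = ('SUCCESS',)
    FAILURE_STATES = (
        'FAILURE',
        'REVOKED',
    )
    FINISHED_STATES = SUCCESS_STATES + FAILURE_STATES
    STARTED_STATES = ('RECEIVED', 'STARTED', 'RETRY') + FINISHED_STATES

    def classify_state(state):
        # Assumed mapping, matching the flags asserted in the tests below.
        return {
            'started': state in STARTED_STATES,
            'finished': state in FINISHED_STATES,
            'success': state in SUCCESS_STATES,
        }

    assert classify_state('REVOKED') == {
        'started': True,
        'finished': True,
        'success': False,
    }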
diff --git a/readthedocs/rtd_tests/base.py b/readthedocs/rtd_tests/base.py
index f11ec5a49b4..e7d8f8409e0 100644
--- a/readthedocs/rtd_tests/base.py
+++ b/readthedocs/rtd_tests/base.py
@@ -1,19 +1,18 @@
+# -*- coding: utf-8 -*-
"""Base classes and mixins for unit tests."""
-from __future__ import absolute_import
-from builtins import object
+import logging
import os
import shutil
-import logging
import tempfile
from collections import OrderedDict
-from mock import patch
from django.conf import settings
-from django.test import TestCase, RequestFactory
from django.contrib.auth.models import AnonymousUser
from django.contrib.messages.storage.fallback import FallbackStorage
from django.contrib.sessions.middleware import SessionMiddleware
-import six
+from django.test import RequestFactory, TestCase
+from mock import patch
+
log = logging.getLogger(__name__)
@@ -23,7 +22,7 @@ def setUp(self):
self.original_DOCROOT = settings.DOCROOT
self.cwd = os.path.dirname(__file__)
self.build_dir = tempfile.mkdtemp()
- log.info("build dir: %s", self.build_dir)
+ log.info('build dir: %s', self.build_dir)
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
settings.DOCROOT = self.build_dir
@@ -42,7 +41,7 @@ class MockBuildTestCase(TestCase):
pass
-class RequestFactoryTestMixin(object):
+class RequestFactoryTestMixin:
"""
Adds helper methods for testing with :py:class:`RequestFactory`
@@ -107,14 +106,14 @@ def post_step(self, step, **kwargs):
if not self.url:
raise Exception('Missing wizard URL')
try:
- data = dict(
- ('{0}-{1}'.format(step, k), v)
+ data = {
+ '{}-{}'.format(step, k): v
for (k, v) in list(self.step_data[step].items())
- )
+ }
except KeyError:
pass
# Update with prefixed step data
- data['{0}-current_step'.format(self.wizard_class_slug)] = step
+ data['{}-current_step'.format(self.wizard_class_slug)] = step
view = self.wizard_class.as_view()
req = self.request(self.url, method='post', data=data, **kwargs)
resp = view(req)
@@ -146,7 +145,7 @@ def assertWizardResponse(self, response, step=None): # noqa
response.render()
self.assertContains(
response,
- u'name="{0}-current_step"'.format(self.wizard_class_slug)
+ 'name="{}-current_step"'.format(self.wizard_class_slug),
)
# We use camelCase on purpose here to conform with unittest's naming
@@ -170,4 +169,4 @@ def assertWizardFailure(self, response, field, match=None): # noqa
self.assertIn(field, response.context_data['wizard']['form'].errors)
if match is not None:
error = response.context_data['wizard']['form'].errors[field]
- self.assertRegex(six.text_type(error), match) # noqa
+ self.assertRegex(str(error), match) # noqa
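The wizard-test hunk above replaces `dict()` over a generator with a dict comprehension and drops the explicit `{0}` format indices. The same transformation in isolation; the step name and data are illustrative:

    # Illustration of the comprehension introduced in post_step() above.
    step = 'basics'
    step_data = {'name': 'pip', 'repo': 'https://github.com/pypa/pip'}

    data = {
        '{}-{}'.format(step, k): v
        for (k, v) in step_data.items()
    }
    assert data == {
        'basics-name': 'pip',
        'basics-repo': 'https://github.com/pypa/pip',
    }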
diff --git a/readthedocs/rtd_tests/files/api.fjson b/readthedocs/rtd_tests/files/api.fjson
index 0e6077c56f8..7972e09c87f 100644
--- a/readthedocs/rtd_tests/files/api.fjson
+++ b/readthedocs/rtd_tests/files/api.fjson
@@ -43,4 +43,4 @@
"title": "Internationalization"
},
"metatags": ""
-}
\ No newline at end of file
+}
diff --git a/readthedocs/rtd_tests/files/conf.py b/readthedocs/rtd_tests/files/conf.py
index 4007dcfab19..11f872849dd 100644
--- a/readthedocs/rtd_tests/files/conf.py
+++ b/readthedocs/rtd_tests/files/conf.py
@@ -13,7 +13,7 @@
'.md': CommonMarkParser,
}
master_doc = 'index'
-project = u'Pip'
+project = 'Pip'
copyright = str(datetime.now().year)
version = '0.8.1'
release = '0.8.1'
@@ -23,6 +23,6 @@
html_theme = 'sphinx_rtd_theme'
file_insertion_enabled = False
latex_documents = [
- ('index', 'pip.tex', u'Pip Documentation',
- u'', 'manual'),
+ ('index', 'pip.tex', 'Pip Documentation',
+ '', 'manual'),
]
diff --git a/readthedocs/rtd_tests/fixtures/sample_repo/source/conf.py b/readthedocs/rtd_tests/fixtures/sample_repo/source/conf.py
index c6c9fcb64db..e6c4dad0f4b 100644
--- a/readthedocs/rtd_tests/fixtures/sample_repo/source/conf.py
+++ b/readthedocs/rtd_tests/fixtures/sample_repo/source/conf.py
@@ -41,8 +41,8 @@
master_doc = 'index'
# General information about the project.
-project = u'sample'
-copyright = u'2011, Dan'
+project = 'sample'
+copyright = '2011, Dan'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -180,8 +180,10 @@
# Grouping the document tree into LaTeX files. List of tuples (source start
# file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'sample.tex', u'sample Documentation',
- u'Dan', 'manual'),
+ (
+ 'index', 'sample.tex', 'sample Documentation',
+ 'Dan', 'manual',
+ ),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -213,6 +215,8 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'sample', u'sample Documentation',
- [u'Dan'], 1)
+ (
+ 'index', 'sample', 'sample Documentation',
+ ['Dan'], 1,
+ ),
]
diff --git a/readthedocs/rtd_tests/fixtures/sample_repo/source/index.rst b/readthedocs/rtd_tests/fixtures/sample_repo/source/index.rst
index d86e67de5be..164c84a69c0 100644
--- a/readthedocs/rtd_tests/fixtures/sample_repo/source/index.rst
+++ b/readthedocs/rtd_tests/fixtures/sample_repo/source/index.rst
@@ -17,4 +17,3 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
-
diff --git a/readthedocs/rtd_tests/fixtures/spec/v2/schema.yml b/readthedocs/rtd_tests/fixtures/spec/v2/schema.yml
index add1eeafe9b..3ff7b7fb730 100644
--- a/readthedocs/rtd_tests/fixtures/spec/v2/schema.yml
+++ b/readthedocs/rtd_tests/fixtures/spec/v2/schema.yml
@@ -123,7 +123,7 @@ submodules:
# List of submodules to be ignored
# Default: []
exclude: any(list(str()), enum('all'), required=False)
-
+
# Do a recursive clone?
# Default: false
recursive: bool(required=False)
diff --git a/readthedocs/rtd_tests/mocks/environment.py b/readthedocs/rtd_tests/mocks/environment.py
index 4b963b769ba..6928d4cec3c 100644
--- a/readthedocs/rtd_tests/mocks/environment.py
+++ b/readthedocs/rtd_tests/mocks/environment.py
@@ -1,12 +1,11 @@
+# -*- coding: utf-8 -*-
# pylint: disable=missing-docstring
-from __future__ import absolute_import
-from builtins import object
import mock
-class EnvironmentMockGroup(object):
+class EnvironmentMockGroup:
- """Mock out necessary environment pieces"""
+ """Mock out necessary environment pieces."""
def __init__(self):
self.patches = {
@@ -15,43 +14,61 @@ def __init__(self):
'api': mock.patch('slumber.Resource'),
'api_v2.command': mock.patch(
'readthedocs.doc_builder.environments.api_v2.command',
- mock.Mock(**{'get.return_value': {}})),
+ mock.Mock(**{'get.return_value': {}}),
+ ),
'api_v2.build': mock.patch(
'readthedocs.doc_builder.environments.api_v2.build',
- mock.Mock(**{'get.return_value': {}})),
+ mock.Mock(**{'get.return_value': {}}),
+ ),
'api_versions': mock.patch(
- 'readthedocs.projects.models.Project.api_versions'),
+ 'readthedocs.projects.models.Project.api_versions',
+ ),
'non_blocking_lock': mock.patch(
- 'readthedocs.vcs_support.utils.NonBlockingLock.__enter__'),
+ 'readthedocs.vcs_support.utils.NonBlockingLock.__enter__',
+ ),
'append_conf': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf'),
+ 'readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf',
+ ),
'move': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.BaseSphinx.move'),
+ 'readthedocs.doc_builder.backends.sphinx.BaseSphinx.move',
+ ),
'conf_dir': mock.patch(
- 'readthedocs.projects.models.Project.conf_dir'),
+ 'readthedocs.projects.models.Project.conf_dir',
+ ),
'html_build': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.HtmlBuilder.build'),
+ 'readthedocs.doc_builder.backends.sphinx.HtmlBuilder.build',
+ ),
'html_move': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.HtmlBuilder.move'),
+ 'readthedocs.doc_builder.backends.sphinx.HtmlBuilder.move',
+ ),
'localmedia_build': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.LocalMediaBuilder.build'),
+ 'readthedocs.doc_builder.backends.sphinx.LocalMediaBuilder.build',
+ ),
'localmedia_move': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.LocalMediaBuilder.move'),
+ 'readthedocs.doc_builder.backends.sphinx.LocalMediaBuilder.move',
+ ),
'pdf_build': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.PdfBuilder.build'),
+ 'readthedocs.doc_builder.backends.sphinx.PdfBuilder.build',
+ ),
'pdf_move': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.PdfBuilder.move'),
+ 'readthedocs.doc_builder.backends.sphinx.PdfBuilder.move',
+ ),
'epub_build': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.EpubBuilder.build'),
+ 'readthedocs.doc_builder.backends.sphinx.EpubBuilder.build',
+ ),
'epub_move': mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.EpubBuilder.move'),
+ 'readthedocs.doc_builder.backends.sphinx.EpubBuilder.move',
+ ),
'move_mkdocs': mock.patch(
- 'readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.move'),
+ 'readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.move',
+ ),
'append_conf_mkdocs': mock.patch(
- 'readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.append_conf'),
+ 'readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.append_conf',
+ ),
'html_build_mkdocs': mock.patch(
- 'readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.build'),
+ 'readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.build',
+ ),
'glob': mock.patch('readthedocs.doc_builder.backends.sphinx.glob'),
'docker': mock.patch('readthedocs.doc_builder.environments.APIClient'),
@@ -60,7 +77,7 @@ def __init__(self):
self.mocks = {}
def start(self):
- """Create a patch object for class patches"""
+ """Create a patch object for class patches."""
for patch in self.patches:
self.mocks[patch] = self.patches[patch].start()
self.mocks['process'].communicate.return_value = ('', '')
@@ -78,7 +95,7 @@ def stop(self):
pass
def configure_mock(self, mock, kwargs):
- """Configure object mocks"""
+ """Configure object mocks."""
self.mocks[mock].configure_mock(**kwargs)
def __getattr__(self, name):
diff --git a/readthedocs/rtd_tests/mocks/mock_api.py b/readthedocs/rtd_tests/mocks/mock_api.py
index 84c40d7c4d1..9c6ff251515 100644
--- a/readthedocs/rtd_tests/mocks/mock_api.py
+++ b/readthedocs/rtd_tests/mocks/mock_api.py
@@ -1,14 +1,15 @@
+# -*- coding: utf-8 -*-
"""Mock versions of many API-related classes."""
-from __future__ import absolute_import
-from builtins import object
-from contextlib import contextmanager
import json
+from contextlib import contextmanager
+
import mock
+
# Mock tastypi API.
-class ProjectData(object):
+class ProjectData:
def get(self):
return dict()
@@ -18,7 +19,7 @@ def put(self, x=None):
def mock_version(repo):
"""Construct and return a class implementing the Version interface."""
- class MockVersion(object):
+ class MockVersion:
def __init__(self, x=None):
pass
@@ -71,7 +72,7 @@ def get(self, **kwargs):
return MockVersion
-class MockApi(object):
+class MockApi:
def __init__(self, repo):
self.version = mock_version(repo)
diff --git a/readthedocs/rtd_tests/mocks/paths.py b/readthedocs/rtd_tests/mocks/paths.py
index 34fa7e5953f..787eabff923 100644
--- a/readthedocs/rtd_tests/mocks/paths.py
+++ b/readthedocs/rtd_tests/mocks/paths.py
@@ -1,7 +1,8 @@
+# -*- coding: utf-8 -*-
"""Context managers to patch os.path.exists calls."""
-from __future__ import absolute_import
import os
import re
+
import mock
diff --git a/readthedocs/rtd_tests/tests/projects/test_admin_actions.py b/readthedocs/rtd_tests/tests/projects/test_admin_actions.py
index 6898c5bf136..dd25f4a13b4 100644
--- a/readthedocs/rtd_tests/tests/projects/test_admin_actions.py
+++ b/readthedocs/rtd_tests/tests/projects/test_admin_actions.py
@@ -1,8 +1,9 @@
-import mock
+# -*- coding: utf-8 -*-
import django_dynamic_fixture as fixture
+import mock
+from django import urls
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.auth.models import User
-from django import urls
from django.test import TestCase
from readthedocs.core.models import UserProfile
@@ -34,7 +35,7 @@ def test_project_ban_owner(self):
}
resp = self.client.post(
urls.reverse('admin:projects_project_changelist'),
- action_data
+ action_data,
)
self.assertTrue(self.project.users.filter(profile__banned=True).exists())
self.assertFalse(self.project.users.filter(profile__banned=False).exists())
@@ -52,14 +53,14 @@ def test_project_ban_multiple_owners(self):
}
resp = self.client.post(
urls.reverse('admin:projects_project_changelist'),
- action_data
+ action_data,
)
self.assertFalse(self.project.users.filter(profile__banned=True).exists())
self.assertEqual(self.project.users.filter(profile__banned=False).count(), 2)
@mock.patch('readthedocs.projects.admin.broadcast')
def test_project_delete(self, broadcast):
- """Test project and artifacts are removed"""
+ """Test project and artifacts are removed."""
from readthedocs.projects.tasks import remove_dirs
action_data = {
ACTION_CHECKBOX_NAME: [self.project.pk],
@@ -69,11 +70,11 @@ def test_project_delete(self, broadcast):
}
resp = self.client.post(
urls.reverse('admin:projects_project_changelist'),
- action_data
+ action_data,
)
self.assertFalse(Project.objects.filter(pk=self.project.pk).exists())
broadcast.assert_has_calls([
mock.call(
- type='app', task=remove_dirs, args=[(self.project.doc_path,)]
+ type='app', task=remove_dirs, args=[(self.project.doc_path,)],
),
])
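For reference, a minimal standalone sketch of the `assert_has_calls` pattern used in `test_project_delete` above; the task name and path are stand-ins, not values taken from the test:

    # Illustration: verifying a broadcast call the way the test above does.
    import mock

    broadcast = mock.Mock()
    broadcast(type='app', task='remove_dirs', args=[('/tmp/docs/pip',)])
    broadcast.assert_has_calls([
        mock.call(type='app', task='remove_dirs', args=[('/tmp/docs/pip',)]),
    ])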
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py
index 707a4da79c3..f5dcb1cae06 100644
--- a/readthedocs/rtd_tests/tests/test_api.py
+++ b/readthedocs/rtd_tests/tests/test_api.py
@@ -1,23 +1,14 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import base64
import datetime
import json
import mock
from allauth.socialaccount.models import SocialAccount
-from builtins import str
from django.contrib.auth.models import User
-from django.urls import reverse
from django.http import QueryDict
from django.test import TestCase
-from django.utils import six
+from django.urls import reverse
from django_dynamic_fixture import get
from rest_framework import status
from rest_framework.test import APIClient
@@ -43,6 +34,7 @@
)
from readthedocs.restapi.views.task_views import get_status_data
+
super_auth = base64.b64encode(b'super:test').decode('utf-8')
eric_auth = base64.b64encode(b'eric:test').decode('utf-8')
@@ -204,10 +196,7 @@ def test_save_same_config_using_patch(self):
)
def test_response_building(self):
- """
- The ``view docs`` attr should return a link
- to the dashboard.
- """
+ """The ``view docs`` attr should return a link to the dashboard."""
client = APIClient()
client.login(username='super', password='test')
project = get(
@@ -246,10 +235,7 @@ def test_response_building(self):
self.assertEqual(build['docs_url'], dashboard_url)
def test_response_finished_and_success(self):
- """
- The ``view docs`` attr should return a link
- to the docs.
- """
+ """The ``view docs`` attr should return a link to the docs."""
client = APIClient()
client.login(username='super', password='test')
project = get(
@@ -284,10 +270,7 @@ def test_response_finished_and_success(self):
self.assertEqual(build['docs_url'], docs_url)
def test_response_finished_and_fail(self):
- """
- The ``view docs`` attr should return a link
- to the dashboard.
- """
+ """The ``view docs`` attr should return a link to the dashboard."""
client = APIClient()
client.login(username='super', password='test')
project = get(
@@ -359,7 +342,7 @@ def test_update_build_without_permission(self):
client.force_authenticate(user=api_user)
build = get(Build, project_id=1, version_id=1, state='cloning')
resp = client.put(
- '/api/v2/build/{0}/'.format(build.pk),
+ '/api/v2/build/{}/'.format(build.pk),
{
'project': 1,
'version': 1,
@@ -381,11 +364,11 @@ def test_make_build_protected_fields(self):
api_user = get(User, staff=False, password='test')
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{0}/'.format(build.pk), format='json')
+ resp = client.get('/api/v2/build/{}/'.format(build.pk), format='json')
self.assertEqual(resp.status_code, 200)
client.force_authenticate(user=User.objects.get(username='super'))
- resp = client.get('/api/v2/build/{0}/'.format(build.pk), format='json')
+ resp = client.get('/api/v2/build/{}/'.format(build.pk), format='json')
self.assertEqual(resp.status_code, 200)
self.assertIn('builder', resp.data)
@@ -431,19 +414,19 @@ def test_get_raw_log_success(self):
BuildCommandResult,
build=build,
command='python setup.py install',
- output='Installing dependencies...'
+ output='Installing dependencies...',
)
get(
BuildCommandResult,
build=build,
command='git checkout master',
- output='Switched to branch "master"'
+ output='Switched to branch "master"',
)
client = APIClient()
api_user = get(User, user='test', password='test')
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{0}.txt'.format(build.pk))
+ resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
self.assertEqual(resp.status_code, 200)
self.assertIn('Read the Docs build information', resp.content.decode())
@@ -457,11 +440,11 @@ def test_get_raw_log_success(self):
self.assertIn('[rtd-command-info]', resp.content.decode())
self.assertIn(
'python setup.py install\nInstalling dependencies...',
- resp.content.decode()
+ resp.content.decode(),
)
self.assertIn(
'git checkout master\nSwitched to branch "master"',
- resp.content.decode()
+ resp.content.decode(),
)
def test_get_raw_log_building(self):
@@ -481,13 +464,13 @@ def test_get_raw_log_building(self):
BuildCommandResult,
build=build,
command='git checkout master',
- output='Switched to branch "master"'
+ output='Switched to branch "master"',
)
client = APIClient()
api_user = get(User, user='test', password='test')
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{0}.txt'.format(build.pk))
+ resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
self.assertEqual(resp.status_code, 200)
self.assertIn('Read the Docs build information', resp.content.decode())
@@ -501,17 +484,17 @@ def test_get_raw_log_building(self):
self.assertIn('[rtd-command-info]', resp.content.decode())
self.assertIn(
'python setup.py install\nInstalling dependencies...',
- resp.content.decode()
+ resp.content.decode(),
)
self.assertIn(
'git checkout master\nSwitched to branch "master"',
- resp.content.decode()
+ resp.content.decode(),
)
def test_get_raw_log_failure(self):
build = get(
Build, project_id=1, version_id=1,
- builder='foo', success=False, exit_code=1
+ builder='foo', success=False, exit_code=1,
)
get(
BuildCommandResult,
@@ -524,13 +507,13 @@ def test_get_raw_log_failure(self):
BuildCommandResult,
build=build,
command='git checkout master',
- output='Switched to branch "master"'
+ output='Switched to branch "master"',
)
client = APIClient()
api_user = get(User, user='test', password='test')
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{0}.txt'.format(build.pk))
+ resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
self.assertEqual(resp.status_code, 200)
self.assertIn('Read the Docs build information', resp.content.decode())
@@ -544,11 +527,11 @@ def test_get_raw_log_failure(self):
self.assertIn('[rtd-command-info]', resp.content.decode())
self.assertIn(
'python setup.py install\nInstalling dependencies...',
- resp.content.decode()
+ resp.content.decode(),
)
self.assertIn(
'git checkout master\nSwitched to branch "master"',
- resp.content.decode()
+ resp.content.decode(),
)
def test_get_invalid_raw_log(self):
@@ -556,7 +539,7 @@ def test_get_invalid_raw_log(self):
api_user = get(User, user='test', password='test')
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{0}.txt'.format(404))
+ resp = client.get('/api/v2/build/{}.txt'.format(404))
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
def test_build_filter_by_commit(self):
@@ -661,8 +644,7 @@ def test_project_features(self):
resp = client.get('/api/v2/project/%s/' % (project.pk))
self.assertEqual(resp.status_code, 200)
self.assertIn('features', resp.data)
- six.assertCountEqual(
- self,
+ self.assertCountEqual(
resp.data['features'],
[feature1.feature_id, feature2.feature_id],
)
@@ -832,11 +814,11 @@ def setUp(self):
self.project = get(Project)
self.version = get(
Version, slug='master', verbose_name='master',
- active=True, project=self.project
+ active=True, project=self.project,
)
self.version_tag = get(
Version, slug='v1.0', verbose_name='v1.0',
- active=True, project=self.project
+ active=True, project=self.project,
)
self.github_payload = {
'ref': 'master',
@@ -868,7 +850,7 @@ def test_webhook_skipped_project(self, trigger_build):
self.project.save()
response = client.post(
- '/api/v2/webhook/github/{0}/'.format(
+ '/api/v2/webhook/github/{}/'.format(
self.project.slug,
),
self.github_payload,
@@ -883,56 +865,62 @@ def test_github_webhook_for_branches(self, trigger_build):
client = APIClient()
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'master'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=self.version, project=self.project)])
+ [mock.call(force=True, version=self.version, project=self.project)],
+ )
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'non-existent'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=mock.ANY, project=self.project)])
+ [mock.call(force=True, version=mock.ANY, project=self.project)],
+ )
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'refs/heads/master'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=self.version, project=self.project)])
+ [mock.call(force=True, version=self.version, project=self.project)],
+ )
def test_github_webhook_for_tags(self, trigger_build):
"""GitHub webhook API."""
client = APIClient()
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'v1.0'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=self.version_tag, project=self.project)])
+ [mock.call(force=True, version=self.version_tag, project=self.project)],
+ )
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'refs/heads/non-existent'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=mock.ANY, project=self.project)])
+ [mock.call(force=True, version=mock.ANY, project=self.project)],
+ )
client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'ref': 'refs/tags/v1.0'},
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=self.version_tag, project=self.project)])
+ [mock.call(force=True, version=self.version_tag, project=self.project)],
+ )
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_github_create_event(self, sync_repository_task, trigger_build):
@@ -986,7 +974,7 @@ def test_github_invalid_webhook(self, trigger_build):
"""GitHub webhook unhandled event."""
client = APIClient()
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/github/{}/'.format(self.project.slug),
{'foo': 'bar'},
format='json',
HTTP_X_GITHUB_EVENT='pull_request',
@@ -1003,7 +991,7 @@ def test_gitlab_webhook_for_branches(self, trigger_build):
format='json',
)
trigger_build.assert_called_with(
- force=True, version=mock.ANY, project=self.project
+ force=True, version=mock.ANY, project=self.project,
)
trigger_build.reset_mock()
@@ -1029,7 +1017,7 @@ def test_gitlab_webhook_for_tags(self, trigger_build):
format='json',
)
trigger_build.assert_called_with(
- force=True, version=self.version_tag, project=self.project
+ force=True, version=self.version_tag, project=self.project,
)
trigger_build.reset_mock()
@@ -1042,7 +1030,7 @@ def test_gitlab_webhook_for_tags(self, trigger_build):
format='json',
)
trigger_build.assert_called_with(
- force=True, version=self.version_tag, project=self.project
+ force=True, version=self.version_tag, project=self.project,
)
trigger_build.reset_mock()
@@ -1058,7 +1046,8 @@ def test_gitlab_webhook_for_tags(self, trigger_build):
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_gitlab_push_hook_creation(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.gitlab_payload.update(
before=GITLAB_NULL_HASH,
@@ -1079,7 +1068,8 @@ def test_gitlab_push_hook_creation(
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_gitlab_push_hook_deletion(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.gitlab_payload.update(
before='95790bf891e76fee5e1747ab589903a6a1f80f22',
@@ -1100,7 +1090,8 @@ def test_gitlab_push_hook_deletion(
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_gitlab_tag_push_hook_creation(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.gitlab_payload.update(
object_kind=GITLAB_TAG_PUSH,
@@ -1122,7 +1113,8 @@ def test_gitlab_tag_push_hook_creation(
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_gitlab_tag_push_hook_deletion(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.gitlab_payload.update(
object_kind=GITLAB_TAG_PUSH,
@@ -1146,7 +1138,7 @@ def test_gitlab_invalid_webhook(self, trigger_build):
"""GitLab webhook unhandled event."""
client = APIClient()
resp = client.post(
- '/api/v2/webhook/gitlab/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
{'object_kind': 'pull_request'},
format='json',
)
@@ -1162,7 +1154,8 @@ def test_bitbucket_webhook(self, trigger_build):
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=mock.ANY, project=self.project)])
+ [mock.call(force=True, version=mock.ANY, project=self.project)],
+ )
client.post(
'/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
{
@@ -1178,7 +1171,8 @@ def test_bitbucket_webhook(self, trigger_build):
format='json',
)
trigger_build.assert_has_calls(
- [mock.call(force=True, version=mock.ANY, project=self.project)])
+ [mock.call(force=True, version=mock.ANY, project=self.project)],
+ )
trigger_build_call_count = trigger_build.call_count
client.post(
@@ -1198,7 +1192,8 @@ def test_bitbucket_webhook(self, trigger_build):
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_bitbucket_push_hook_creation(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.bitbucket_payload['push']['changes'][0]['old'] = None
resp = client.post(
@@ -1216,7 +1211,8 @@ def test_bitbucket_push_hook_creation(
@mock.patch('readthedocs.core.views.hooks.sync_repository_task')
def test_bitbucket_push_hook_deletion(
- self, sync_repository_task, trigger_build):
+ self, sync_repository_task, trigger_build,
+ ):
client = APIClient()
self.bitbucket_payload['push']['changes'][0]['new'] = None
resp = client.post(
@@ -1236,15 +1232,16 @@ def test_bitbucket_invalid_webhook(self, trigger_build):
"""Bitbucket webhook unhandled event."""
client = APIClient()
resp = client.post(
- '/api/v2/webhook/bitbucket/{0}/'.format(self.project.slug),
- {'foo': 'bar'}, format='json', HTTP_X_EVENT_KEY='pull_request')
+ '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ {'foo': 'bar'}, format='json', HTTP_X_EVENT_KEY='pull_request',
+ )
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
def test_generic_api_fails_without_auth(self, trigger_build):
client = APIClient()
resp = client.post(
- '/api/v2/webhook/generic/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/generic/{}/'.format(self.project.slug),
{},
format='json',
)
@@ -1262,7 +1259,7 @@ def test_generic_api_respects_token_auth(self, trigger_build):
)
self.assertIsNotNone(integration.token)
resp = client.post(
- '/api/v2/webhook/{0}/{1}/'.format(
+ '/api/v2/webhook/{}/{}/'.format(
self.project.slug,
integration.pk,
),
@@ -1273,7 +1270,7 @@ def test_generic_api_respects_token_auth(self, trigger_build):
self.assertTrue(resp.data['build_triggered'])
# Test nonexistent branch
resp = client.post(
- '/api/v2/webhook/{0}/{1}/'.format(
+ '/api/v2/webhook/{}/{}/'.format(
self.project.slug,
integration.pk,
),
@@ -1289,7 +1286,7 @@ def test_generic_api_respects_basic_auth(self, trigger_build):
self.project.users.add(user)
client.force_authenticate(user=user)
resp = client.post(
- '/api/v2/webhook/generic/{0}/'.format(self.project.slug),
+ '/api/v2/webhook/generic/{}/'.format(self.project.slug),
{},
format='json',
)
@@ -1301,10 +1298,11 @@ def test_generic_api_falls_back_to_token_auth(self, trigger_build):
user = get(User)
client.force_authenticate(user=user)
integration = Integration.objects.create(
- project=self.project, integration_type=Integration.API_WEBHOOK)
+ project=self.project, integration_type=Integration.API_WEBHOOK,
+ )
self.assertIsNotNone(integration.token)
resp = client.post(
- '/api/v2/webhook/{0}/{1}/'.format(
+ '/api/v2/webhook/{}/{}/'.format(
self.project.slug,
integration.pk,
),
@@ -1491,9 +1489,8 @@ def test_get_version_by_id(self):
)
def test_get_active_versions(self):
- """
- Test the full response of ``/api/v2/version/?project__slug=pip&active=true``
- """
+        """
+        Test the full response of
+        ``/api/v2/version/?project__slug=pip&active=true``.
+        """
pip = Project.objects.get(slug='pip')
data = QueryDict('', mutable=True)
@@ -1503,7 +1500,7 @@ def test_get_active_versions(self):
})
url = '{base_url}?{querystring}'.format(
base_url=reverse('version-list'),
- querystring=data.urlencode()
+ querystring=data.urlencode(),
)
resp = self.client.get(url, content_type='application/json')
@@ -1517,7 +1514,7 @@ def test_get_active_versions(self):
})
url = '{base_url}?{querystring}'.format(
base_url=reverse('version-list'),
- querystring=data.urlencode()
+ querystring=data.urlencode(),
)
resp = self.client.get(url, content_type='application/json')
@@ -1534,11 +1531,13 @@ def test_get_status_data(self):
{'data': 'public'},
'Something bad happened',
)
- self.assertEqual(data, {
- 'name': 'public_task_exception',
- 'data': {'data': 'public'},
- 'started': True,
- 'finished': True,
- 'success': False,
- 'error': 'Something bad happened',
- })
+ self.assertEqual(
+ data, {
+ 'name': 'public_task_exception',
+ 'data': {'data': 'public'},
+ 'started': True,
+ 'finished': True,
+ 'success': False,
+ 'error': 'Something bad happened',
+ },
+ )
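The test hunk above drops the `six.assertCountEqual(self, ...)` shim; on Python 3 the method lives directly on `unittest.TestCase`. A standalone sketch, with illustrative feature ids:

    # Illustration: order-insensitive comparison without the six shim.
    import unittest

    class FeatureListTest(unittest.TestCase):
        def test_features(self):
            self.assertCountEqual(
                ['mkdocs_theme_rtd', 'allow_deprecated_webhooks'],
                ['allow_deprecated_webhooks', 'mkdocs_theme_rtd'],
            )

    if __name__ == '__main__':
        unittest.main()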
diff --git a/readthedocs/rtd_tests/tests/test_api_permissions.py b/readthedocs/rtd_tests/tests/test_api_permissions.py
index d3666927add..7367d8f5d6a 100644
--- a/readthedocs/rtd_tests/tests/test_api_permissions.py
+++ b/readthedocs/rtd_tests/tests/test_api_permissions.py
@@ -1,8 +1,9 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
from functools import partial
-from mock import Mock
from unittest import TestCase
+from mock import Mock
+
from readthedocs.restapi.permissions import APIRestrictedPermission
@@ -17,23 +18,27 @@ def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
- view=None))
+ view=None,
+ ))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
- obj=obj))
+ obj=obj,
+ ))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
- view=None))
+ view=None,
+ ))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
- obj=obj))
+ obj=obj,
+ ))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
diff --git a/readthedocs/rtd_tests/tests/test_api_version_compare.py b/readthedocs/rtd_tests/tests/test_api_version_compare.py
index 0daec18a086..24b94ad39d6 100644
--- a/readthedocs/rtd_tests/tests/test_api_version_compare.py
+++ b/readthedocs/rtd_tests/tests/test_api_version_compare.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
from django.test import TestCase
from readthedocs.builds.constants import LATEST
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py
index fdc85693c3b..5618dd92151 100644
--- a/readthedocs/rtd_tests/tests/test_backend.py
+++ b/readthedocs/rtd_tests/tests/test_backend.py
@@ -1,19 +1,10 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import os
from os.path import exists
from tempfile import mkdtemp
import django_dynamic_fixture as fixture
-import pytest
-import six
from django.contrib.auth.models import User
from mock import Mock, patch
@@ -34,15 +25,15 @@
class TestGitBackend(RTDTestCase):
def setUp(self):
git_repo = make_test_git()
- super(TestGitBackend, self).setUp()
+ super().setUp()
self.eric = User(username='eric')
self.eric.set_password('test')
self.eric.save()
self.project = Project.objects.create(
- name="Test Project",
- repo_type="git",
+ name='Test Project',
+ repo_type='git',
#Our top-level checkout
- repo=git_repo
+ repo=git_repo,
)
self.project.users.add(self.eric)
self.dummy_conf = Mock()
@@ -81,7 +72,6 @@ def test_git_branches(self, checkout_path):
{branch.verbose_name for branch in repo.branches},
)
- @pytest.mark.skipif(six.PY2, reason='Only for python3')
@patch('readthedocs.projects.models.Project.checkout_path')
def test_git_branches_unicode(self, checkout_path):
repo_path = self.project.repo
@@ -126,7 +116,7 @@ def test_git_checkout_invalid_revision(self):
repo.checkout(version)
self.assertEqual(
str(e.exception),
- RepositoryError.FAILED_TO_CHECKOUT.format(version)
+ RepositoryError.FAILED_TO_CHECKOUT.format(version),
)
def test_git_tags(self):
@@ -139,7 +129,7 @@ def test_git_tags(self):
# so we need to hack the repo path
repo.working_dir = repo_path
self.assertEqual(
- set(['v01', 'v02', 'release-ünîø∂é']),
+ {'v01', 'v02', 'release-ünîø∂é'},
{vcs.verbose_name for vcs in repo.tags},
)
@@ -189,14 +179,14 @@ def test_check_submodule_urls(self):
def test_check_invalid_submodule_urls(self):
repo = self.project.vcs_repo()
repo.update()
- r = repo.checkout('invalidsubmodule')
+ repo.checkout('invalidsubmodule')
with self.assertRaises(RepositoryError) as e:
repo.update_submodules(self.dummy_conf)
# `invalid` is created in `make_test_git`
# it's a url in ssh form.
self.assertEqual(
str(e.exception),
- RepositoryError.INVALID_SUBMODULES.format(['invalid'])
+ RepositoryError.INVALID_SUBMODULES.format(['invalid']),
)
@patch('readthedocs.projects.models.Project.checkout_path')
@@ -218,28 +208,28 @@ def test_fetch_clean_tags_and_branches(self, checkout_path):
# We still have all branches and tags in the local repo
self.assertEqual(
- set(['v01', 'v02']),
- set(vcs.verbose_name for vcs in repo.tags)
+ {'v01', 'v02'},
+ {vcs.verbose_name for vcs in repo.tags},
)
self.assertEqual(
- set([
+ {
'invalidsubmodule', 'master', 'submodule', 'newbranch',
- ]),
- set(vcs.verbose_name for vcs in repo.branches)
+ },
+ {vcs.verbose_name for vcs in repo.branches},
)
repo.update()
# We don't have the eliminated branches and tags in the local repo
self.assertEqual(
- set(['v01']),
- set(vcs.verbose_name for vcs in repo.tags)
+ {'v01'},
+ {vcs.verbose_name for vcs in repo.tags},
)
self.assertEqual(
- set([
- 'invalidsubmodule', 'master', 'submodule'
- ]),
- set(vcs.verbose_name for vcs in repo.branches)
+ {
+ 'invalidsubmodule', 'master', 'submodule',
+ },
+ {vcs.verbose_name for vcs in repo.branches},
)
@@ -247,7 +237,7 @@ class TestHgBackend(RTDTestCase):
def setUp(self):
hg_repo = make_test_hg()
- super(TestHgBackend, self).setUp()
+ super().setUp()
self.eric = User(username='eric')
self.eric.set_password('test')
self.eric.save()
@@ -255,7 +245,7 @@ def setUp(self):
name='Test Project',
repo_type='hg',
# Our top-level checkout
- repo=hg_repo
+ repo=hg_repo,
)
self.project.users.add(self.eric)
@@ -286,7 +276,7 @@ def test_checkout_invalid_revision(self):
repo.checkout(version)
self.assertEqual(
str(e.exception),
- RepositoryError.FAILED_TO_CHECKOUT.format(version)
+ RepositoryError.FAILED_TO_CHECKOUT.format(version),
)
def test_parse_tags(self):
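The backend-test hunks above trade `set([...])` calls and `set(<genexpr>)` for set literals and comprehensions. The same idiom in isolation; the `Ref` class is a stand-in for the VCS tag objects:

    # Illustration of the set-literal / set-comprehension idiom used above.
    class Ref:
        def __init__(self, verbose_name):
            self.verbose_name = verbose_name

    tags = [Ref('v01'), Ref('v02')]

    assert {'v01', 'v02'} == {ref.verbose_name for ref in tags}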
diff --git a/readthedocs/rtd_tests/tests/test_backend_svn.py b/readthedocs/rtd_tests/tests/test_backend_svn.py
index 8de9ea776a0..eec70268267 100644
--- a/readthedocs/rtd_tests/tests/test_backend_svn.py
+++ b/readthedocs/rtd_tests/tests/test_backend_svn.py
@@ -1,21 +1,14 @@
# -*- coding: utf-8 -*-
-"""Tests For SVN"""
+"""Tests For SVN."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
-from mock import patch
from django_dynamic_fixture import get
-from readthedocs.rtd_tests.base import RTDTestCase
-from readthedocs.projects.models import Project
from readthedocs.builds.models import Version
+from readthedocs.projects.models import Project
+from readthedocs.rtd_tests.base import RTDTestCase
from readthedocs.vcs_support.backends.svn import Backend as SvnBackend
+
class TestSvnBackend(RTDTestCase):
def test_get_url(self):
diff --git a/readthedocs/rtd_tests/tests/test_build_config.py b/readthedocs/rtd_tests/tests/test_build_config.py
index b0c2186d419..a0fd290b327 100644
--- a/readthedocs/rtd_tests/tests/test_build_config.py
+++ b/readthedocs/rtd_tests/tests/test_build_config.py
@@ -1,26 +1,25 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
from os import path
import pytest
-import six
import yamale
-from readthedocs.config.tests import utils
from yamale.validators import DefaultValidators, Validator
+from readthedocs.config.tests import utils
+
+
V2_SCHEMA = path.join(
path.dirname(__file__),
- '../fixtures/spec/v2/schema.yml'
+ '../fixtures/spec/v2/schema.yml',
)
class PathValidator(Validator):
"""
- Path validator
+ Path validator.
- Checks if the given value is a string and a existing
- file.
+    Checks if the given value is a string and an existing file.
"""
tag = 'path'
@@ -28,10 +27,10 @@ class PathValidator(Validator):
configuration_file = '.'
def _is_valid(self, value):
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
file_ = path.join(
path.dirname(self.configuration_file),
- value
+ value,
)
return path.exists(file_)
return False
@@ -59,7 +58,7 @@ def validate_schema(file):
data = yamale.make_data(file)
schema = yamale.make_schema(
V2_SCHEMA,
- validators=validators
+ validators=validators,
)
yamale.validate(schema, data)
@@ -85,7 +84,7 @@ def test_invalid_version(tmpdir):
assertInvalidConfig(
tmpdir,
'version: "latest"',
- ['version:', "'latest' not in"]
+ ['version:', "'latest' not in"],
)
@@ -93,7 +92,7 @@ def test_invalid_version_1(tmpdir):
assertInvalidConfig(
tmpdir,
'version: "1"',
- ['version', "'1' not in"]
+ ['version', "'1' not in"],
)
@@ -135,7 +134,7 @@ def test_formats_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ['formats', "'invalidformat' not in"]
+ ['formats', "'invalidformat' not in"],
)
@@ -165,7 +164,7 @@ def test_conda_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ['environment.yaml', 'is not a path']
+ ['environment.yaml', 'is not a path'],
)
@@ -178,7 +177,7 @@ def test_conda_missing_key(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ['conda.environment: Required']
+ ['conda.environment: Required'],
)
@@ -210,7 +209,7 @@ def test_build_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ["build.image: '9.0' not in"]
+ ["build.image: '9.0' not in"],
)
@@ -233,7 +232,7 @@ def test_python_version_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ["version: '4' not in"]
+ ["version: '4' not in"],
)
@@ -266,7 +265,7 @@ def test_python_install_requirements(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ['requirements:', "'23' is not a path"]
+ ['requirements:', "'23' is not a path"],
)
@@ -292,7 +291,7 @@ def test_python_install_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ["python.install: 'guido' is not a list"]
+ ["python.install: 'guido' is not a list"],
)
@@ -332,13 +331,15 @@ def test_python_install_extra_requirements_empty(tmpdir, value):
@pytest.mark.parametrize('pipfile', ['another_docs/', '.', 'project/'])
def test_python_install_pipfile(tmpdir, pipfile):
- utils.apply_fs(tmpdir, {
- 'another_docs': {
+ utils.apply_fs(
+ tmpdir, {
+ 'another_docs': {
+ 'Pipfile': '',
+ },
+ 'project': {},
'Pipfile': '',
},
- 'project': {},
- 'Pipfile': '',
- })
+ )
content = '''
version: "2"
python:
@@ -417,7 +418,7 @@ def test_python_system_packages_invalid(tmpdir, value):
assertInvalidConfig(
tmpdir,
content.format(value=value),
- ['is not a bool']
+ ['is not a bool'],
)
@@ -449,7 +450,7 @@ def test_sphinx_invalid(tmpdir, value):
assertInvalidConfig(
tmpdir,
content,
- ['is not a path']
+ ['is not a path'],
)
@@ -472,7 +473,7 @@ def test_sphinx_fail_on_warning_invalid(tmpdir, value):
assertInvalidConfig(
tmpdir,
content.format(value=value),
- ['is not a bool']
+ ['is not a bool'],
)
@@ -504,7 +505,7 @@ def test_mkdocs_invalid(tmpdir, value):
assertInvalidConfig(
tmpdir,
content,
- ['is not a path']
+ ['is not a path'],
)
@@ -527,7 +528,7 @@ def test_mkdocs_fail_on_warning_invalid(tmpdir, value):
assertInvalidConfig(
tmpdir,
content.format(value=value),
- ['is not a bool']
+ ['is not a bool'],
)
@@ -595,7 +596,7 @@ def test_redirects_invalid(tmpdir):
assertInvalidConfig(
tmpdir,
content,
- ['is not a str']
+ ['is not a str'],
)
diff --git a/readthedocs/rtd_tests/tests/test_build_forms.py b/readthedocs/rtd_tests/tests/test_build_forms.py
index b0ba6890d09..1e901771632 100644
--- a/readthedocs/rtd_tests/tests/test_build_forms.py
+++ b/readthedocs/rtd_tests/tests/test_build_forms.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
from django.test import TestCase
from django_dynamic_fixture import get
@@ -30,7 +28,7 @@ def test_default_version_is_active(self):
'active': True,
'privacy_level': PRIVATE,
},
- instance=version
+ instance=version,
)
self.assertTrue(form.is_valid())
@@ -48,7 +46,7 @@ def test_default_version_is_inactive(self):
'active': False,
'privacy_level': PRIVATE,
},
- instance=version
+ instance=version,
)
self.assertFalse(form.is_valid())
self.assertIn('active', form.errors)
diff --git a/readthedocs/rtd_tests/tests/test_build_notifications.py b/readthedocs/rtd_tests/tests/test_build_notifications.py
index 9460589463a..81acedb013c 100644
--- a/readthedocs/rtd_tests/tests/test_build_notifications.py
+++ b/readthedocs/rtd_tests/tests/test_build_notifications.py
@@ -1,18 +1,15 @@
# -*- coding: utf-8 -*-
"""Notifications sent after build is completed."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import django_dynamic_fixture as fixture
from django.core import mail
from django.test import TestCase
from mock import patch
from readthedocs.builds.models import Build, Version
-from readthedocs.projects.models import Project, EmailHook, WebHook
-from readthedocs.projects.tasks import send_notifications
from readthedocs.projects.forms import WebHookForm
+from readthedocs.projects.models import EmailHook, Project, WebHook
+from readthedocs.projects.tasks import send_notifications
class BuildNotificationsTests(TestCase):
@@ -74,7 +71,11 @@ def test_webhook_form_url_length(self):
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors,
- {'url':
- ['Enter a valid URL.',
- 'Ensure this value has at most 600 characters (it has 1507).']
- })
+ {
+ 'url':
+ [
+ 'Enter a valid URL.',
+ 'Ensure this value has at most 600 characters (it has 1507).',
+ ],
+ },
+ )
diff --git a/readthedocs/rtd_tests/tests/test_builds.py b/readthedocs/rtd_tests/tests/test_builds.py
index 53e222203fe..afdbe46a3c9 100644
--- a/readthedocs/rtd_tests/tests/test_builds.py
+++ b/readthedocs/rtd_tests/tests/test_builds.py
@@ -1,12 +1,7 @@
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+# -*- coding: utf-8 -*-
+import os
import mock
-import os
from django.test import TestCase
from django_dynamic_fixture import fixture, get
@@ -14,7 +9,7 @@
from readthedocs.doc_builder.config import load_yaml_config
from readthedocs.doc_builder.environments import LocalBuildEnvironment
from readthedocs.doc_builder.python_environments import Virtualenv
-from readthedocs.projects.models import Project, EnvironmentVariable
+from readthedocs.projects.models import EnvironmentVariable, Project
from readthedocs.projects.tasks import UpdateDocsTaskStep
from readthedocs.rtd_tests.tests.test_config_integration import create_load
@@ -32,18 +27,22 @@ def tearDown(self):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build(self, load_config):
- '''Test full build'''
+ """Test full build."""
load_config.side_effect = create_load()
- project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
self.mocks.configure_mock('api_versions', {'return_value': [version]})
- self.mocks.configure_mock('api', {
- 'get.return_value': {'downloads': "no_url_here"}
- })
+ self.mocks.configure_mock(
+ 'api', {
+ 'get.return_value': {'downloads': 'no_url_here'},
+ },
+ )
self.mocks.patches['html_build'].stop()
build_env = LocalBuildEnvironment(project=project, version=version, build={})
@@ -51,7 +50,7 @@ def test_build(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
task.build_docs()
@@ -63,15 +62,17 @@ def test_build(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build_respects_pdf_flag(self, load_config):
- '''Build output format control'''
+ """Build output format control."""
load_config.side_effect = create_load()
- project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- enable_pdf_build=True,
- enable_epub_build=False,
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ enable_pdf_build=True,
+ enable_epub_build=False,
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
build_env = LocalBuildEnvironment(project=project, version=version, build={})
@@ -79,7 +80,7 @@ def test_build_respects_pdf_flag(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
task.build_docs()
@@ -99,20 +100,20 @@ def test_dont_localmedia_build_pdf_epub_search_in_mkdocs(self, load_config):
documentation_type='mkdocs',
enable_pdf_build=True,
enable_epub_build=True,
- versions=[fixture()]
+ versions=[fixture()],
)
version = project.versions.all().first()
build_env = LocalBuildEnvironment(
project=project,
version=version,
- build={}
+ build={},
)
python_env = Virtualenv(version=version, build_env=build_env)
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
task.build_docs()
@@ -126,15 +127,17 @@ def test_dont_localmedia_build_pdf_epub_search_in_mkdocs(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build_respects_epub_flag(self, load_config):
- '''Test build with epub enabled'''
+ """Test build with epub enabled."""
load_config.side_effect = create_load()
- project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- enable_pdf_build=False,
- enable_epub_build=True,
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ enable_pdf_build=False,
+ enable_epub_build=True,
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
build_env = LocalBuildEnvironment(project=project, version=version, build={})
@@ -142,7 +145,7 @@ def test_build_respects_epub_flag(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
task.build_docs()
@@ -154,15 +157,17 @@ def test_build_respects_epub_flag(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build_respects_yaml(self, load_config):
- '''Test YAML build options'''
+ """Test YAML build options."""
load_config.side_effect = create_load({'formats': ['epub']})
- project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- enable_pdf_build=False,
- enable_epub_build=False,
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ enable_pdf_build=False,
+ enable_epub_build=False,
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
build_env = LocalBuildEnvironment(project=project, version=version, build={})
@@ -171,7 +176,7 @@ def test_build_respects_yaml(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
task.build_docs()
@@ -183,19 +188,21 @@ def test_build_respects_yaml(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build_pdf_latex_failures(self, load_config):
- '''Build failure if latex fails'''
+ """Build failure if latex fails."""
load_config.side_effect = create_load()
self.mocks.patches['html_build'].stop()
self.mocks.patches['pdf_build'].stop()
- project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- enable_pdf_build=True,
- enable_epub_build=False,
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ enable_pdf_build=True,
+ enable_epub_build=False,
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
assert project.conf_dir() == '/tmp/rtd'
@@ -204,7 +211,7 @@ def test_build_pdf_latex_failures(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
# Mock out the separate calls to Popen using an iterable side_effect
@@ -216,10 +223,13 @@ def test_build_pdf_latex_failures(self, load_config):
((b'', b''), 0), # latex
]
mock_obj = mock.Mock()
- mock_obj.communicate.side_effect = [output for (output, status)
- in returns]
+ mock_obj.communicate.side_effect = [
+ output for (output, status)
+ in returns
+ ]
type(mock_obj).returncode = mock.PropertyMock(
- side_effect=[status for (output, status) in returns])
+ side_effect=[status for (output, status) in returns],
+ )
self.mocks.popen.return_value = mock_obj
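+        # mock consumes side_effect values in order: each communicate() call
+        # returns the next (stdout, stderr) pair and each returncode access
+        # yields the next status code, so individual build commands can be
+        # made to succeed or fail independently.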
with build_env:
@@ -229,19 +239,21 @@ def test_build_pdf_latex_failures(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_build_pdf_latex_not_failure(self, load_config):
- '''Test pass during PDF builds and bad latex failure status code'''
+ """Test pass during PDF builds and bad latex failure status code."""
load_config.side_effect = create_load()
self.mocks.patches['html_build'].stop()
self.mocks.patches['pdf_build'].stop()
- project = get(Project,
- slug='project-2',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- enable_pdf_build=True,
- enable_epub_build=False,
- versions=[fixture()])
+ project = get(
+ Project,
+ slug='project-2',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ enable_pdf_build=True,
+ enable_epub_build=False,
+ versions=[fixture()],
+ )
version = project.versions.all()[0]
assert project.conf_dir() == '/tmp/rtd'
@@ -250,7 +262,7 @@ def test_build_pdf_latex_not_failure(self, load_config):
config = load_yaml_config(version)
task = UpdateDocsTaskStep(
build_env=build_env, project=project, python_env=python_env,
- version=version, config=config
+ version=version, config=config,
)
# Mock out the separate calls to Popen using an iterable side_effect
@@ -262,10 +274,13 @@ def test_build_pdf_latex_not_failure(self, load_config):
((b'', b''), 0), # latex
]
mock_obj = mock.Mock()
- mock_obj.communicate.side_effect = [output for (output, status)
- in returns]
+ mock_obj.communicate.side_effect = [
+ output for (output, status)
+ in returns
+ ]
type(mock_obj).returncode = mock.PropertyMock(
- side_effect=[status for (output, status) in returns])
+ side_effect=[status for (output, status) in returns],
+ )
self.mocks.popen.return_value = mock_obj
with build_env:
@@ -286,7 +301,7 @@ def test_save_config_in_build_model(self, load_config, api_v2):
build = get(Build)
version = get(Version, slug='1.8', project=project)
task = UpdateDocsTaskStep(
- project=project, version=version, build={'id': build.pk}
+ project=project, version=version, build={'id': build.pk},
)
task.setup_vcs = mock.Mock()
task.run_setup()
@@ -358,13 +373,13 @@ def test_get_previous_build(self):
Build,
project=self.project,
version=self.version,
- config={'version': 1}
+ config={'version': 1},
)
build_two = get(
Build,
project=self.project,
version=self.version,
- config={'version': 2}
+ config={'version': 2},
)
build_three = get(
Build,
diff --git a/readthedocs/rtd_tests/tests/test_celery.py b/readthedocs/rtd_tests/tests/test_celery.py
index 7de03a97bf2..5ac9fac8883 100644
--- a/readthedocs/rtd_tests/tests/test_celery.py
+++ b/readthedocs/rtd_tests/tests/test_celery.py
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import division, print_function, unicode_literals
-
import os
import shutil
from os.path import exists
@@ -8,36 +6,42 @@
from django.contrib.auth.models import User
from django_dynamic_fixture import get
-from mock import patch, MagicMock
+from mock import MagicMock, patch
from readthedocs.builds.constants import LATEST
-from readthedocs.projects.exceptions import RepositoryError
from readthedocs.builds.models import Build
-from readthedocs.projects.models import Project
+from readthedocs.doc_builder.exceptions import VersionLockedError
from readthedocs.projects import tasks
-
-from readthedocs.rtd_tests.utils import (
- create_git_branch, create_git_tag, delete_git_branch)
-from readthedocs.rtd_tests.utils import make_test_git
+from readthedocs.projects.exceptions import RepositoryError
+from readthedocs.projects.models import Project
from readthedocs.rtd_tests.base import RTDTestCase
from readthedocs.rtd_tests.mocks.mock_api import mock_api
-from readthedocs.doc_builder.exceptions import VersionLockedError
+from readthedocs.rtd_tests.utils import (
+ create_git_branch,
+ create_git_tag,
+ delete_git_branch,
+ make_test_git,
+)
class TestCeleryBuilding(RTDTestCase):
- """These tests run the build functions directly. They don't use celery"""
+ """
+ These tests run the build functions directly.
+
+    They don't use Celery.
+ """
def setUp(self):
repo = make_test_git()
self.repo = repo
- super(TestCeleryBuilding, self).setUp()
+ super().setUp()
self.eric = User(username='eric')
self.eric.set_password('test')
self.eric.save()
self.project = Project.objects.create(
- name="Test Project",
- repo_type="git",
+ name='Test Project',
+ repo_type='git',
# Our top-level checkout
repo=repo,
)
@@ -45,7 +49,7 @@ def setUp(self):
def tearDown(self):
shutil.rmtree(self.repo)
- super(TestCeleryBuilding, self).tearDown()
+ super().tearDown()
def test_remove_dirs(self):
directory = mkdtemp()
@@ -74,14 +78,17 @@ def test_clear_artifacts(self):
@patch('readthedocs.projects.tasks.UpdateDocsTaskStep.build_docs', new=MagicMock)
@patch('readthedocs.projects.tasks.UpdateDocsTaskStep.setup_vcs', new=MagicMock)
def test_update_docs(self):
- build = get(Build, project=self.project,
- version=self.project.versions.first())
+ build = get(
+ Build, project=self.project,
+ version=self.project.versions.first(),
+ )
with mock_api(self.repo) as mapi:
result = tasks.update_docs_task.delay(
self.project.pk,
build_pk=build.pk,
record=False,
- intersphinx=False)
+ intersphinx=False,
+ )
self.assertTrue(result.successful())
@patch('readthedocs.projects.tasks.UpdateDocsTaskStep.setup_python_environment', new=MagicMock)
@@ -91,14 +98,17 @@ def test_update_docs(self):
def test_update_docs_unexpected_setup_exception(self, mock_setup_vcs):
exc = Exception()
mock_setup_vcs.side_effect = exc
- build = get(Build, project=self.project,
- version=self.project.versions.first())
+ build = get(
+ Build, project=self.project,
+ version=self.project.versions.first(),
+ )
with mock_api(self.repo) as mapi:
result = tasks.update_docs_task.delay(
self.project.pk,
build_pk=build.pk,
record=False,
- intersphinx=False)
+ intersphinx=False,
+ )
self.assertTrue(result.successful())
@patch('readthedocs.projects.tasks.UpdateDocsTaskStep.setup_python_environment', new=MagicMock)
@@ -108,14 +118,17 @@ def test_update_docs_unexpected_setup_exception(self, mock_setup_vcs):
def test_update_docs_unexpected_build_exception(self, mock_build_docs):
exc = Exception()
mock_build_docs.side_effect = exc
- build = get(Build, project=self.project,
- version=self.project.versions.first())
+ build = get(
+ Build, project=self.project,
+ version=self.project.versions.first(),
+ )
with mock_api(self.repo) as mapi:
result = tasks.update_docs_task.delay(
self.project.pk,
build_pk=build.pk,
record=False,
- intersphinx=False)
+ intersphinx=False,
+ )
self.assertTrue(result.successful())
@patch('readthedocs.projects.tasks.UpdateDocsTaskStep.setup_python_environment', new=MagicMock)
@@ -125,14 +138,17 @@ def test_update_docs_unexpected_build_exception(self, mock_build_docs):
def test_no_notification_on_version_locked_error(self, mock_setup_vcs, mock_send_notifications):
mock_setup_vcs.side_effect = VersionLockedError()
- build = get(Build, project=self.project,
- version=self.project.versions.first())
+ build = get(
+ Build, project=self.project,
+ version=self.project.versions.first(),
+ )
with mock_api(self.repo) as mapi:
result = tasks.update_docs_task.delay(
self.project.pk,
build_pk=build.pk,
record=False,
- intersphinx=False)
+ intersphinx=False,
+ )
mock_send_notifications.assert_not_called()
self.assertTrue(result.successful())
@@ -162,7 +178,7 @@ def test_check_duplicate_reserved_version_latest(self, checkout_path, api_v2):
sync_repository.sync_repo()
self.assertEqual(
str(e.exception),
- RepositoryError.DUPLICATED_RESERVED_VERSIONS
+ RepositoryError.DUPLICATED_RESERVED_VERSIONS,
)
delete_git_branch(self.repo, 'latest')
@@ -188,7 +204,7 @@ def test_check_duplicate_reserved_version_stable(self, checkout_path, api_v2):
sync_repository.sync_repo()
self.assertEqual(
str(e.exception),
- RepositoryError.DUPLICATED_RESERVED_VERSIONS
+ RepositoryError.DUPLICATED_RESERVED_VERSIONS,
)
# TODO: Check that we can build properly after
@@ -226,9 +242,11 @@ def public_task_exception():
        # although the task raised an exception, it's a success since we add the
# exception into the ``info`` attributes
self.assertEqual(result.status, 'SUCCESS')
- self.assertEqual(result.info, {
- 'task_name': 'public_task_exception',
- 'context': {},
- 'public_data': {},
- 'error': 'Something bad happened',
- })
+ self.assertEqual(
+ result.info, {
+ 'task_name': 'public_task_exception',
+ 'context': {},
+ 'public_data': {},
+ 'error': 'Something bad happened',
+ },
+ )
diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py b/readthedocs/rtd_tests/tests/test_config_integration.py
index 95cdefb54d4..e09e82199bb 100644
--- a/readthedocs/rtd_tests/tests/test_config_integration.py
+++ b/readthedocs/rtd_tests/tests/test_config_integration.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import tempfile
from os import path
@@ -93,7 +90,7 @@ def test_python_supported_versions_default_image_1_0(self, load_config):
'formats': [
'htmlzip',
'epub',
- 'pdf'
+ 'pdf',
],
'use_system_packages': self.project.use_system_packages,
'requirements_file': self.project.requirements_file,
@@ -113,8 +110,10 @@ def test_python_supported_versions_image_1_0(self, load_config):
self.project.container_image = 'readthedocs/build:1.0'
self.project.save()
config = load_yaml_config(self.version)
- self.assertEqual(config.get_valid_python_versions(),
- [2, 2.7, 3, 3.4])
+ self.assertEqual(
+ config.get_valid_python_versions(),
+ [2, 2.7, 3, 3.4],
+ )
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_python_supported_versions_image_2_0(self, load_config):
@@ -122,8 +121,10 @@ def test_python_supported_versions_image_2_0(self, load_config):
self.project.container_image = 'readthedocs/build:2.0'
self.project.save()
config = load_yaml_config(self.version)
- self.assertEqual(config.get_valid_python_versions(),
- [2, 2.7, 3, 3.5])
+ self.assertEqual(
+ config.get_valid_python_versions(),
+ [2, 2.7, 3, 3.5],
+ )
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_python_supported_versions_image_latest(self, load_config):
@@ -131,8 +132,10 @@ def test_python_supported_versions_image_latest(self, load_config):
self.project.container_image = 'readthedocs/build:latest'
self.project.save()
config = load_yaml_config(self.version)
- self.assertEqual(config.get_valid_python_versions(),
- [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6])
+ self.assertEqual(
+ config.get_valid_python_versions(),
+ [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6],
+ )
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_python_default_version(self, load_config):
@@ -165,7 +168,7 @@ def test_python_set_python_version_in_config(self, load_config):
@mock.patch('readthedocs.doc_builder.config.load_config')
def test_python_invalid_version_in_config(self, load_config):
load_config.side_effect = create_load({
- 'python': {'version': 2.6}
+ 'python': {'version': 2.6},
})
self.project.container_image = 'readthedocs/build:2.0'
self.project.save()
@@ -178,11 +181,11 @@ def test_install_project(self, load_config):
config = load_yaml_config(self.version)
self.assertEqual(
config.python.install_with_pip or config.python.install_with_setup,
- False
+ False,
)
load_config.side_effect = create_load({
- 'python': {'setup_py_install': True}
+ 'python': {'setup_py_install': True},
})
config = load_yaml_config(self.version)
self.assertEqual(config.python.install_with_setup, True)
@@ -192,16 +195,16 @@ def test_extra_requirements(self, load_config):
load_config.side_effect = create_load({
'python': {
'pip_install': True,
- 'extra_requirements': ['tests', 'docs']
- }
+ 'extra_requirements': ['tests', 'docs'],
+ },
})
config = load_yaml_config(self.version)
self.assertEqual(config.python.extra_requirements, ['tests', 'docs'])
load_config.side_effect = create_load({
'python': {
- 'extra_requirements': ['tests', 'docs']
- }
+ 'extra_requirements': ['tests', 'docs'],
+ },
})
config = load_yaml_config(self.version)
self.assertEqual(config.python.extra_requirements, [])
@@ -213,8 +216,8 @@ def test_extra_requirements(self, load_config):
load_config.side_effect = create_load({
'python': {
'setup_py_install': True,
- 'extra_requirements': ['tests', 'docs']
- }
+ 'extra_requirements': ['tests', 'docs'],
+ },
})
config = load_yaml_config(self.version)
self.assertEqual(config.python.extra_requirements, [])
@@ -231,7 +234,7 @@ def test_conda_with_cofig(self, checkout_path):
{
'conda': {
'file': conda_file,
- }
+ },
},
base_path=base_path,
)
@@ -286,7 +289,7 @@ def test_requirements_file_from_yml(self, checkout_path):
@pytest.mark.django_db
@mock.patch('readthedocs.projects.models.Project.checkout_path')
-class TestLoadConfigV2(object):
+class TestLoadConfigV2:
@pytest.fixture(autouse=True)
def create_project(self):
@@ -305,9 +308,11 @@ def create_project(self):
)
def create_config_file(self, tmpdir, config):
- base_path = apply_fs(tmpdir, {
- 'readthedocs.yml': '',
- })
+ base_path = apply_fs(
+ tmpdir, {
+ 'readthedocs.yml': '',
+ },
+ )
config.setdefault('version', 2)
config_file = path.join(str(base_path), 'readthedocs.yml')
yaml.safe_dump(config, open(config_file, 'w'))
@@ -315,7 +320,7 @@ def create_config_file(self, tmpdir, config):
def get_update_docs_task(self):
build_env = LocalBuildEnvironment(
- self.project, self.version, record=False
+ self.project, self.version, record=False,
)
update_docs = tasks.UpdateDocsTaskStep(
@@ -344,7 +349,8 @@ def test_report_using_invalid_version(self, checkout_path, tmpdir):
@patch('readthedocs.doc_builder.backends.sphinx.HtmlBuilder.build')
@patch('readthedocs.doc_builder.backends.sphinx.HtmlBuilder.append_conf')
def test_build_formats_default_empty(
- self, append_conf, html_build, checkout_path, config, tmpdir):
+ self, append_conf, html_build, checkout_path, config, tmpdir,
+ ):
"""
The default value for formats is [], which means no extra
        formats are built.
@@ -356,7 +362,7 @@ def test_build_formats_default_empty(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=update_docs.config
+ config=update_docs.config,
)
update_docs.python_env = python_env
outcomes = update_docs.build_docs()
@@ -373,10 +379,9 @@ def test_build_formats_default_empty(
@patch('readthedocs.doc_builder.backends.sphinx.HtmlBuilder.append_conf')
def test_build_formats_only_pdf(
self, append_conf, html_build, build_docs_class,
- checkout_path, tmpdir):
- """
- Only the pdf format is build.
- """
+ checkout_path, tmpdir,
+ ):
+ """Only the pdf format is build."""
checkout_path.return_value = str(tmpdir)
self.create_config_file(tmpdir, {'formats': ['pdf']})
@@ -384,7 +389,7 @@ def test_build_formats_only_pdf(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=update_docs.config
+ config=update_docs.config,
)
update_docs.python_env = python_env
@@ -408,8 +413,8 @@ def test_conda_environment(self, build_failed, checkout_path, tmpdir):
base_path = self.create_config_file(
tmpdir,
{
- 'conda': {'environment': conda_file}
- }
+ 'conda': {'environment': conda_file},
+ },
)
update_docs = self.get_update_docs_task()
@@ -466,8 +471,8 @@ def test_python_requirements(self, run, checkout_path, tmpdir):
base_path = self.create_config_file(
tmpdir,
{
- 'python': {'requirements': requirements_file}
- }
+ 'python': {'requirements': requirements_file},
+ },
)
update_docs = self.get_update_docs_task()
@@ -476,7 +481,7 @@ def test_python_requirements(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.install_user_requirements()
@@ -493,8 +498,8 @@ def test_python_requirements_empty(self, run, checkout_path, tmpdir):
self.create_config_file(
tmpdir,
{
- 'python': {'requirements': ''}
- }
+ 'python': {'requirements': ''},
+ },
)
update_docs = self.get_update_docs_task()
@@ -503,7 +508,7 @@ def test_python_requirements_empty(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.install_user_requirements()
@@ -517,8 +522,8 @@ def test_python_install_setup(self, run, checkout_path, tmpdir):
self.create_config_file(
tmpdir,
{
- 'python': {'install': 'setup.py'}
- }
+ 'python': {'install': 'setup.py'},
+ },
)
update_docs = self.get_update_docs_task()
@@ -527,7 +532,7 @@ def test_python_install_setup(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.install_package()
@@ -545,8 +550,8 @@ def test_python_install_pip(self, run, checkout_path, tmpdir):
self.create_config_file(
tmpdir,
{
- 'python': {'install': 'pip'}
- }
+ 'python': {'install': 'pip'},
+ },
)
update_docs = self.get_update_docs_task()
@@ -555,7 +560,7 @@ def test_python_install_pip(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.install_package()
@@ -588,8 +593,8 @@ def test_python_extra_requirements(self, run, checkout_path, tmpdir):
'python': {
'install': 'pip',
'extra_requirements': ['docs'],
- }
- }
+ },
+ },
)
update_docs = self.get_update_docs_task()
@@ -598,7 +603,7 @@ def test_python_extra_requirements(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.install_package()
@@ -619,8 +624,8 @@ def test_system_packages(self, run, checkout_path, tmpdir):
{
'python': {
'system_packages': True,
- }
- }
+ },
+ },
)
update_docs = self.get_update_docs_task()
@@ -629,7 +634,7 @@ def test_system_packages(self, run, checkout_path, tmpdir):
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
update_docs.python_env.setup_base()
@@ -639,13 +644,18 @@ def test_system_packages(self, run, checkout_path, tmpdir):
assert '--system-site-packages' in args
assert config.python.use_system_site_packages
- @pytest.mark.parametrize('value,result',
- [('html', 'sphinx'),
- ('htmldir', 'sphinx_htmldir'),
- ('singlehtml', 'sphinx_singlehtml')])
+ @pytest.mark.parametrize(
+ 'value,result',
+ [
+ ('html', 'sphinx'),
+ ('htmldir', 'sphinx_htmldir'),
+ ('singlehtml', 'sphinx_singlehtml'),
+ ],
+ )
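+    # 'value' is the sphinx.builder setting written to the YAML config;
+    # 'result' is the internal builder name expected to be passed to
+    # get_builder_class below.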
@patch('readthedocs.projects.tasks.get_builder_class')
def test_sphinx_builder(
- self, get_builder_class, checkout_path, value, result, tmpdir):
+ self, get_builder_class, checkout_path, value, result, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(tmpdir, {'sphinx': {'builder': value}})
@@ -658,10 +668,12 @@ def test_sphinx_builder(
get_builder_class.assert_called_with(result)
@pytest.mark.skip(
- 'This test is not compatible with the new validation around doctype.')
+ 'This test is not compatible with the new validation around doctype.',
+ )
@patch('readthedocs.projects.tasks.get_builder_class')
def test_sphinx_builder_default(
- self, get_builder_class, checkout_path, tmpdir):
+ self, get_builder_class, checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(tmpdir, {})
@@ -677,7 +689,8 @@ def test_sphinx_builder_default(
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf')
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.run')
def test_sphinx_configuration_default(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
"""Should be default to find a conf.py file."""
checkout_path.return_value = str(tmpdir)
@@ -691,7 +704,7 @@ def test_sphinx_configuration_default(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -706,7 +719,8 @@ def test_sphinx_configuration_default(
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf')
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.run')
def test_sphinx_configuration_default(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
"""Should be default to find a conf.py file."""
checkout_path.return_value = str(tmpdir)
@@ -720,7 +734,7 @@ def test_sphinx_configuration_default(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -735,21 +749,24 @@ def test_sphinx_configuration_default(
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf')
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.run')
def test_sphinx_configuration(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
- apply_fs(tmpdir, {
- 'conf.py': '',
- 'docx': {
+ apply_fs(
+ tmpdir, {
'conf.py': '',
+ 'docx': {
+ 'conf.py': '',
+ },
},
- })
+ )
self.create_config_file(
tmpdir,
{
'sphinx': {
'configuration': 'docx/conf.py',
},
- }
+ },
)
update_docs = self.get_update_docs_task()
@@ -757,7 +774,7 @@ def test_sphinx_configuration(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -772,13 +789,16 @@ def test_sphinx_configuration(
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.append_conf')
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.run')
def test_sphinx_fail_on_warning(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
- apply_fs(tmpdir, {
- 'docx': {
- 'conf.py': '',
+ apply_fs(
+ tmpdir, {
+ 'docx': {
+ 'conf.py': '',
+ },
},
- })
+ )
self.create_config_file(
tmpdir,
{
@@ -786,7 +806,7 @@ def test_sphinx_fail_on_warning(
'configuration': 'docx/conf.py',
'fail_on_warning': True,
},
- }
+ },
)
update_docs = self.get_update_docs_task()
@@ -794,7 +814,7 @@ def test_sphinx_fail_on_warning(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -809,21 +829,24 @@ def test_sphinx_fail_on_warning(
@patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.append_conf')
@patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.run')
def test_mkdocs_configuration(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
- apply_fs(tmpdir, {
- 'mkdocs.yml': '',
- 'docx': {
+ apply_fs(
+ tmpdir, {
'mkdocs.yml': '',
+ 'docx': {
+ 'mkdocs.yml': '',
+ },
},
- })
+ )
self.create_config_file(
tmpdir,
{
'mkdocs': {
'configuration': 'docx/mkdocs.yml',
},
- }
+ },
)
self.project.documentation_type = 'mkdocs'
self.project.save()
@@ -833,7 +856,7 @@ def test_mkdocs_configuration(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -849,13 +872,16 @@ def test_mkdocs_configuration(
@patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.append_conf')
@patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.run')
def test_mkdocs_fail_on_warning(
- self, run, append_conf, move, checkout_path, tmpdir):
+ self, run, append_conf, move, checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
- apply_fs(tmpdir, {
- 'docx': {
- 'mkdocs.yml': '',
+ apply_fs(
+ tmpdir, {
+ 'docx': {
+ 'mkdocs.yml': '',
+ },
},
- })
+ )
self.create_config_file(
tmpdir,
{
@@ -863,7 +889,7 @@ def test_mkdocs_fail_on_warning(
'configuration': 'docx/mkdocs.yml',
'fail_on_warning': True,
},
- }
+ },
)
self.project.documentation_type = 'mkdocs'
self.project.save()
@@ -873,7 +899,7 @@ def test_mkdocs_fail_on_warning(
python_env = Virtualenv(
version=self.version,
build_env=update_docs.build_env,
- config=config
+ config=config,
)
update_docs.python_env = python_env
@@ -884,11 +910,17 @@ def test_mkdocs_fail_on_warning(
append_conf.assert_called_once()
move.assert_called_once()
- @pytest.mark.parametrize('value,expected', [(ALL, ['one', 'two', 'three']),
- (['one', 'two'], ['one', 'two'])])
+ @pytest.mark.parametrize(
+ 'value,expected', [
+ (ALL, ['one', 'two', 'three']),
+ (['one', 'two'], ['one', 'two']),
+ ],
+ )
@patch('readthedocs.vcs_support.backends.git.Backend.checkout_submodules')
- def test_submodules_include(self, checkout_submodules,
- checkout_path, tmpdir, value, expected):
+ def test_submodules_include(
+ self, checkout_submodules,
+ checkout_path, tmpdir, value, expected,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(
tmpdir,
@@ -896,7 +928,7 @@ def test_submodules_include(self, checkout_submodules,
'submodules': {
'include': value,
},
- }
+ },
)
git_repo = make_git_repo(str(tmpdir))
@@ -913,8 +945,10 @@ def test_submodules_include(self, checkout_submodules,
assert update_docs.config.submodules.recursive is False
@patch('readthedocs.vcs_support.backends.git.Backend.checkout_submodules')
- def test_submodules_exclude(self, checkout_submodules,
- checkout_path, tmpdir):
+ def test_submodules_exclude(
+ self, checkout_submodules,
+ checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(
tmpdir,
@@ -923,7 +957,7 @@ def test_submodules_exclude(self, checkout_submodules,
'exclude': ['one'],
'recursive': True,
},
- }
+ },
)
git_repo = make_git_repo(str(tmpdir))
@@ -940,8 +974,10 @@ def test_submodules_exclude(self, checkout_submodules,
assert update_docs.config.submodules.recursive is True
@patch('readthedocs.vcs_support.backends.git.Backend.checkout_submodules')
- def test_submodules_exclude_all(self, checkout_submodules,
- checkout_path, tmpdir):
+ def test_submodules_exclude_all(
+ self, checkout_submodules,
+ checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(
tmpdir,
@@ -950,7 +986,7 @@ def test_submodules_exclude_all(self, checkout_submodules,
'exclude': ALL,
'recursive': True,
},
- }
+ },
)
git_repo = make_git_repo(str(tmpdir))
@@ -965,13 +1001,15 @@ def test_submodules_exclude_all(self, checkout_submodules,
checkout_submodules.assert_not_called()
@patch('readthedocs.vcs_support.backends.git.Backend.checkout_submodules')
- def test_submodules_default_exclude_all(self, checkout_submodules,
- checkout_path, tmpdir):
+ def test_submodules_default_exclude_all(
+ self, checkout_submodules,
+ checkout_path, tmpdir,
+ ):
checkout_path.return_value = str(tmpdir)
self.create_config_file(
tmpdir,
- {}
+ {},
)
git_repo = make_git_repo(str(tmpdir))
diff --git a/readthedocs/rtd_tests/tests/test_core_tags.py b/readthedocs/rtd_tests/tests/test_core_tags.py
index 851fd40d1b6..d119a5d5bbe 100644
--- a/readthedocs/rtd_tests/tests/test_core_tags.py
+++ b/readthedocs/rtd_tests/tests/test_core_tags.py
@@ -1,20 +1,18 @@
# -*- coding: utf-8 -*-
-from __future__ import absolute_import
import mock
import pytest
-
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings
-from readthedocs.projects.models import Project
from readthedocs.builds.constants import LATEST
from readthedocs.core.templatetags import core_tags
+from readthedocs.projects.models import Project
@override_settings(USE_SUBDOMAIN=False, PRODUCTION_DOMAIN='readthedocs.org')
class CoreTagsTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
url_base = '{scheme}://{domain}/docs/pip{{version}}'.format(
@@ -38,7 +36,7 @@ def setUp(self):
with mock.patch('readthedocs.projects.models.broadcast'):
self.client.login(username='eric', password='test')
self.pip = Project.objects.get(slug='pip')
- self.pip_fr = Project.objects.create(name="PIP-FR", slug='pip-fr', language='fr', main_language_project=self.pip)
+ self.pip_fr = Project.objects.create(name='PIP-FR', slug='pip-fr', language='fr', main_language_project=self.pip)
def test_project_only(self):
proj = Project.objects.get(slug='pip')
diff --git a/readthedocs/rtd_tests/tests/test_core_utils.py b/readthedocs/rtd_tests/tests/test_core_utils.py
index 85e7d735507..e57b46c0b17 100644
--- a/readthedocs/rtd_tests/tests/test_core_utils.py
+++ b/readthedocs/rtd_tests/tests/test_core_utils.py
@@ -1,15 +1,13 @@
# -*- coding: utf-8 -*-
-"""Test core util functions"""
+"""Test core util functions."""
-from __future__ import absolute_import
import mock
-
-from django_dynamic_fixture import get
from django.test import TestCase
+from django_dynamic_fixture import get
-from readthedocs.projects.models import Project
from readthedocs.builds.models import Version
-from readthedocs.core.utils import trigger_build, slugify
+from readthedocs.core.utils import slugify, trigger_build
+from readthedocs.projects.models import Project
class CoreUtilTests(TestCase):
@@ -32,7 +30,7 @@ def test_trigger_skipped_project(self, update_docs_task):
@mock.patch('readthedocs.projects.tasks.update_docs_task')
def test_trigger_custom_queue(self, update_docs):
- """Use a custom queue when routing the task"""
+ """Use a custom queue when routing the task."""
self.project.build_queue = 'build03'
trigger_build(project=self.project, version=self.version)
kwargs = {
@@ -58,7 +56,7 @@ def test_trigger_custom_queue(self, update_docs):
@mock.patch('readthedocs.projects.tasks.update_docs_task')
def test_trigger_build_time_limit(self, update_docs):
- """Pass of time limit"""
+ """Pass of time limit."""
trigger_build(project=self.project, version=self.version)
kwargs = {
'version_pk': self.version.pk,
@@ -83,7 +81,7 @@ def test_trigger_build_time_limit(self, update_docs):
@mock.patch('readthedocs.projects.tasks.update_docs_task')
def test_trigger_build_invalid_time_limit(self, update_docs):
- """Time limit as string"""
+ """Time limit as string."""
self.project.container_time_limit = '200s'
trigger_build(project=self.project, version=self.version)
kwargs = {
@@ -109,7 +107,7 @@ def test_trigger_build_invalid_time_limit(self, update_docs):
@mock.patch('readthedocs.projects.tasks.update_docs_task')
def test_trigger_build_rounded_time_limit(self, update_docs):
- """Time limit should round down"""
+ """Time limit should round down."""
self.project.container_time_limit = 3
trigger_build(project=self.project, version=self.version)
kwargs = {
@@ -134,14 +132,24 @@ def test_trigger_build_rounded_time_limit(self, update_docs):
update_docs.signature().apply_async.assert_called()
def test_slugify(self):
- """Test additional slugify"""
- self.assertEqual(slugify('This is a test'),
- 'this-is-a-test')
- self.assertEqual(slugify('project_with_underscores-v.1.0'),
- 'project-with-underscores-v10')
- self.assertEqual(slugify('project_with_underscores-v.1.0', dns_safe=False),
- 'project_with_underscores-v10')
- self.assertEqual(slugify('A title_-_with separated parts'),
- 'a-title-with-separated-parts')
- self.assertEqual(slugify('A title_-_with separated parts', dns_safe=False),
- 'a-title_-_with-separated-parts')
+ """Test additional slugify."""
+ self.assertEqual(
+ slugify('This is a test'),
+ 'this-is-a-test',
+ )
+ self.assertEqual(
+ slugify('project_with_underscores-v.1.0'),
+ 'project-with-underscores-v10',
+ )
+ self.assertEqual(
+ slugify('project_with_underscores-v.1.0', dns_safe=False),
+ 'project_with_underscores-v10',
+ )
+ self.assertEqual(
+ slugify('A title_-_with separated parts'),
+ 'a-title-with-separated-parts',
+ )
+ self.assertEqual(
+ slugify('A title_-_with separated parts', dns_safe=False),
+ 'a-title_-_with-separated-parts',
+ )
diff --git a/readthedocs/rtd_tests/tests/test_doc_builder.py b/readthedocs/rtd_tests/tests/test_doc_builder.py
index b8522679346..57bd6a15460 100644
--- a/readthedocs/rtd_tests/tests/test_doc_builder.py
+++ b/readthedocs/rtd_tests/tests/test_doc_builder.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import os
import tempfile
from collections import namedtuple
@@ -64,12 +61,12 @@ def test_conf_py_path(self, checkout_path, docs_dir):
for value, expected in (('conf.py', '/'), ('docs/conf.py', '/docs/')):
base_sphinx.config_file = os.path.join(
- tmp_dir, value
+ tmp_dir, value,
)
params = base_sphinx.get_config_params()
self.assertEqual(
params['conf_py_path'],
- expected
+ expected,
)
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.docs_dir')
@@ -80,7 +77,8 @@ def test_conf_py_path(self, checkout_path, docs_dir):
@patch('readthedocs.projects.models.Project.checkout_path')
def test_create_conf_py(
self, checkout_path, get_conf_py_path, _,
- get_config_params, create_index, docs_dir):
+ get_config_params, create_index, docs_dir,
+ ):
"""
        Test for a project without a ``conf.py`` file.
@@ -119,13 +117,13 @@ def test_create_conf_py(
os.path.dirname(__file__),
'..',
'files',
- 'conf.py'
+ 'conf.py',
)
with open(generated_conf_py) as gf, open(expected_conf_py) as ef:
autogenerated_confpy_lines = 28
self.assertEqual(
gf.readlines()[:autogenerated_confpy_lines],
- ef.readlines()[:autogenerated_confpy_lines]
+ ef.readlines()[:autogenerated_confpy_lines],
)
@patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.docs_dir')
@@ -136,7 +134,8 @@ def test_create_conf_py(
@patch('readthedocs.projects.models.Project.checkout_path')
def test_multiple_conf_py(
self, checkout_path, get_conf_py_path, _, get_config_params,
- create_index, docs_dir):
+ create_index, docs_dir,
+ ):
"""
Test for a project with multiple ``conf.py`` files.
@@ -307,14 +306,14 @@ def test_append_conf_create_yaml(self, checkout_path, run):
config = yaml.safe_load(open(generated_yaml))
self.assertEqual(
config['docs_dir'],
- os.path.join(tmpdir, 'docs')
+ os.path.join(tmpdir, 'docs'),
)
self.assertEqual(
config['extra_css'],
[
'http://readthedocs.org/static/css/badge_only.css',
- 'http://readthedocs.org/static/css/readthedocs-doc-embed.css'
- ]
+ 'http://readthedocs.org/static/css/readthedocs-doc-embed.css',
+ ],
)
self.assertEqual(
config['extra_javascript'],
@@ -322,14 +321,14 @@ def test_append_conf_create_yaml(self, checkout_path, run):
'readthedocs-data.js',
'http://readthedocs.org/static/core/js/readthedocs-doc-embed.js',
'http://readthedocs.org/static/javascript/readthedocs-analytics.js',
- ]
+ ],
)
self.assertIsNone(
config['google_analytics'],
)
self.assertEqual(
config['site_name'],
- 'mkdocs'
+ 'mkdocs',
)
@patch('readthedocs.doc_builder.base.BaseBuilder.run')
@@ -344,7 +343,7 @@ def test_append_conf_existing_yaml_on_root(self, checkout_path, run):
'google_analytics': ['UA-1234-5', 'mkdocs.org'],
'docs_dir': 'docs',
},
- open(yaml_file, 'w')
+ open(yaml_file, 'w'),
)
checkout_path.return_value = tmpdir
@@ -364,14 +363,14 @@ def test_append_conf_existing_yaml_on_root(self, checkout_path, run):
config = yaml.safe_load(open(yaml_file))
self.assertEqual(
config['docs_dir'],
- 'docs'
+ 'docs',
)
self.assertEqual(
config['extra_css'],
[
'http://readthedocs.org/static/css/badge_only.css',
- 'http://readthedocs.org/static/css/readthedocs-doc-embed.css'
- ]
+ 'http://readthedocs.org/static/css/readthedocs-doc-embed.css',
+ ],
)
self.assertEqual(
config['extra_javascript'],
@@ -379,14 +378,14 @@ def test_append_conf_existing_yaml_on_root(self, checkout_path, run):
'readthedocs-data.js',
'http://readthedocs.org/static/core/js/readthedocs-doc-embed.js',
'http://readthedocs.org/static/javascript/readthedocs-analytics.js',
- ]
+ ],
)
self.assertIsNone(
config['google_analytics'],
)
self.assertEqual(
config['site_name'],
- 'mkdocs'
+ 'mkdocs',
)
@patch('readthedocs.doc_builder.base.BaseBuilder.run')
@@ -435,7 +434,7 @@ def test_dont_override_theme(self, checkout_path, run):
'site_name': 'mkdocs',
'docs_dir': 'docs',
},
- open(yaml_file, 'w')
+ open(yaml_file, 'w'),
)
checkout_path.return_value = tmpdir
@@ -455,7 +454,7 @@ def test_dont_override_theme(self, checkout_path, run):
config = yaml.safe_load(open(yaml_file))
self.assertEqual(
config['theme_dir'],
- 'not-readthedocs'
+ 'not-readthedocs',
)
@patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.generate_rtd_data')
@@ -470,7 +469,7 @@ def test_write_js_data_docs_dir(self, checkout_path, run, generate_rtd_data):
'site_name': 'mkdocs',
'docs_dir': 'docs',
},
- open(yaml_file, 'w')
+ open(yaml_file, 'w'),
)
checkout_path.return_value = tmpdir
generate_rtd_data.return_value = ''
@@ -488,5 +487,5 @@ def test_write_js_data_docs_dir(self, checkout_path, run, generate_rtd_data):
generate_rtd_data.assert_called_with(
docs_dir='docs',
- mkdocs_config=mock.ANY
+ mkdocs_config=mock.ANY,
)
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py
index 6f3b8bc950a..25ce88bcb45 100644
--- a/readthedocs/rtd_tests/tests/test_doc_building.py
+++ b/readthedocs/rtd_tests/tests/test_doc_building.py
@@ -5,13 +5,6 @@
* raw subprocess calls like .communicate expects bytes
* the Command wrappers encapsulate the bytes and expose unicode
"""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import json
import os
import re
@@ -20,7 +13,6 @@
import mock
import pytest
-from builtins import str
from django.test import TestCase
from django_dynamic_fixture import get
from docker.errors import APIError as DockerAPIError
@@ -45,7 +37,7 @@
DUMMY_BUILD_ID = 123
-SAMPLE_UNICODE = u'HérÉ îß sömê ünïçó∂é'
+SAMPLE_UNICODE = 'HérÉ îß sömê ünïçó∂é'
SAMPLE_UTF8_BYTES = SAMPLE_UNICODE.encode('utf-8')
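+# SAMPLE_UTF8_BYTES stands in for raw subprocess output (bytes); per the module
+# docstring, the Command wrappers are expected to decode it back to unicode.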
@@ -66,9 +58,11 @@ def tearDown(self):
def test_normal_execution(self):
"""Normal build in passing state."""
- self.mocks.configure_mock('process', {
- 'communicate.return_value': (b'This is okay', '')
- })
+ self.mocks.configure_mock(
+ 'process', {
+ 'communicate.return_value': (b'This is okay', ''),
+ },
+ )
type(self.mocks.process).returncode = PropertyMock(return_value=0)
build_env = LocalBuildEnvironment(
@@ -83,7 +77,7 @@ def test_normal_execution(self):
self.assertTrue(build_env.done)
self.assertTrue(build_env.successful)
self.assertEqual(len(build_env.commands), 1)
- self.assertEqual(build_env.commands[0].output, u'This is okay')
+ self.assertEqual(build_env.commands[0].output, 'This is okay')
# api() is not called anymore, we use api_v2 instead
self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
@@ -103,7 +97,7 @@ def test_normal_execution(self):
'version': self.version.pk,
'success': True,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'length': mock.ANY,
'error': '',
'setup': '',
@@ -115,9 +109,11 @@ def test_normal_execution(self):
def test_command_not_recorded(self):
"""Normal build in passing state with no command recorded."""
- self.mocks.configure_mock('process', {
- 'communicate.return_value': (b'This is okay', '')
- })
+ self.mocks.configure_mock(
+ 'process', {
+ 'communicate.return_value': (b'This is okay', ''),
+ },
+ )
type(self.mocks.process).returncode = PropertyMock(return_value=0)
build_env = LocalBuildEnvironment(
@@ -152,9 +148,11 @@ def test_command_not_recorded(self):
})
def test_record_command_as_success(self):
- self.mocks.configure_mock('process', {
- 'communicate.return_value': (b'This is okay', '')
- })
+ self.mocks.configure_mock(
+ 'process', {
+ 'communicate.return_value': (b'This is okay', ''),
+ },
+ )
type(self.mocks.process).returncode = PropertyMock(return_value=1)
build_env = LocalBuildEnvironment(
@@ -169,7 +167,7 @@ def test_record_command_as_success(self):
self.assertTrue(build_env.done)
self.assertTrue(build_env.successful)
self.assertEqual(len(build_env.commands), 1)
- self.assertEqual(build_env.commands[0].output, u'This is okay')
+ self.assertEqual(build_env.commands[0].output, 'This is okay')
# api() is not called anymore, we use api_v2 instead
self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
@@ -189,7 +187,7 @@ def test_record_command_as_success(self):
'version': self.version.pk,
'success': True,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'length': mock.ANY,
'error': '',
'setup': '',
@@ -236,9 +234,11 @@ def test_incremental_state_update_with_no_update(self):
def test_failing_execution(self):
"""Build in failing state."""
- self.mocks.configure_mock('process', {
- 'communicate.return_value': (b'This is not okay', '')
- })
+ self.mocks.configure_mock(
+ 'process', {
+ 'communicate.return_value': (b'This is not okay', ''),
+ },
+ )
type(self.mocks.process).returncode = PropertyMock(return_value=1)
build_env = LocalBuildEnvironment(
@@ -254,7 +254,7 @@ def test_failing_execution(self):
self.assertTrue(build_env.done)
self.assertTrue(build_env.failed)
self.assertEqual(len(build_env.commands), 1)
- self.assertEqual(build_env.commands[0].output, u'This is not okay')
+ self.assertEqual(build_env.commands[0].output, 'This is not okay')
# api() is not called anymore, we use api_v2 instead
self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
@@ -274,7 +274,7 @@ def test_failing_execution(self):
'version': self.version.pk,
'success': False,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'length': mock.ANY,
'error': '',
'setup': '',
@@ -513,8 +513,10 @@ def test_api_failure(self):
'docker_client', {
'create_container.side_effect': DockerAPIError(
'Failure creating container', response,
- 'Failure creating container')
- })
+ 'Failure creating container',
+ ),
+ },
+ )
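+        # create_container raising DockerAPIError makes the environment itself
+        # fail to start, which is reported as a build environment creation
+        # failure rather than as a failed build command.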
build_env = DockerBuildEnvironment(
version=self.version,
@@ -537,13 +539,13 @@ def _inner():
'version': self.version.pk,
'success': False,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'exit_code': 1,
'length': mock.ANY,
'error': 'Build environment creation failed',
- 'setup': u'',
- 'output': u'',
- 'state': u'finished',
+ 'setup': '',
+ 'output': '',
+ 'state': 'finished',
'builder': mock.ANY,
})
@@ -554,8 +556,10 @@ def test_api_failure_on_docker_memory_limit(self):
'docker_client', {
'exec_create.side_effect': DockerAPIError(
'Failure creating container', response,
- 'Failure creating container'),
- })
+ 'Failure creating container',
+ ),
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -588,21 +592,23 @@ def test_api_failure_on_docker_memory_limit(self):
'version': self.version.pk,
'success': False,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'exit_code': -1,
'length': mock.ANY,
'error': '',
- 'setup': u'',
- 'output': u'',
- 'state': u'finished',
+ 'setup': '',
+ 'output': '',
+ 'state': 'finished',
'builder': mock.ANY,
})
def test_api_failure_on_error_in_exit(self):
response = Mock(status_code=500, reason='Internal Server Error')
- self.mocks.configure_mock('docker_client', {
- 'kill.side_effect': BuildEnvironmentError('Failed')
- })
+ self.mocks.configure_mock(
+ 'docker_client', {
+ 'kill.side_effect': BuildEnvironmentError('Failed'),
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -640,9 +646,11 @@ def test_api_failure_returns_previous_error_on_error_in_exit(self):
usable error to show the user.
"""
response = Mock(status_code=500, reason='Internal Server Error')
- self.mocks.configure_mock('docker_client', {
- 'kill.side_effect': BuildEnvironmentError('Outer failed')
- })
+ self.mocks.configure_mock(
+ 'docker_client', {
+ 'kill.side_effect': BuildEnvironmentError('Outer failed'),
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -679,7 +687,8 @@ def test_command_execution(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'This is the return',
'exec_inspect.return_value': {'ExitCode': 1},
- })
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -692,9 +701,10 @@ def test_command_execution(self):
self.mocks.docker_client.exec_create.assert_called_with(
container='build-123-project-6-pip',
- cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True)
+ cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True,
+ )
self.assertEqual(build_env.commands[0].exit_code, 1)
- self.assertEqual(build_env.commands[0].output, u'This is the return')
+ self.assertEqual(build_env.commands[0].output, 'This is the return')
self.assertEqual(build_env.commands[0].error, None)
self.assertTrue(build_env.failed)
@@ -733,7 +743,8 @@ def test_command_not_recorded(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'This is the return',
'exec_inspect.return_value': {'ExitCode': 1},
- })
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -746,7 +757,8 @@ def test_command_not_recorded(self):
self.mocks.docker_client.exec_create.assert_called_with(
container='build-123-project-6-pip',
- cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True)
+ cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True,
+ )
self.assertEqual(len(build_env.commands), 0)
self.assertFalse(build_env.failed)
@@ -774,7 +786,8 @@ def test_record_command_as_success(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'This is the return',
'exec_inspect.return_value': {'ExitCode': 1},
- })
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -787,9 +800,10 @@ def test_record_command_as_success(self):
self.mocks.docker_client.exec_create.assert_called_with(
container='build-123-project-6-pip',
- cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True)
+ cmd="/bin/sh -c 'cd /tmp && echo\\ test'", stderr=True, stdout=True,
+ )
self.assertEqual(build_env.commands[0].exit_code, 0)
- self.assertEqual(build_env.commands[0].output, u'This is the return')
+ self.assertEqual(build_env.commands[0].output, 'This is the return')
self.assertEqual(build_env.commands[0].error, None)
self.assertFalse(build_env.failed)
@@ -833,8 +847,9 @@ def test_command_execution_cleanup_exception(self):
'Failure killing container',
response,
'Failure killing container',
- )
- })
+ ),
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -845,7 +860,8 @@ def test_command_execution_cleanup_exception(self):
build_env.run('echo', 'test', cwd='/tmp')
self.mocks.docker_client.kill.assert_called_with(
- 'build-123-project-6-pip')
+ 'build-123-project-6-pip',
+ )
self.assertTrue(build_env.successful)
# api() is not called anymore, we use api_v2 instead
@@ -867,12 +883,12 @@ def test_command_execution_cleanup_exception(self):
'error': '',
'success': True,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'exit_code': 0,
'length': 0,
- 'setup': u'',
- 'output': u'',
- 'state': u'finished',
+ 'setup': '',
+ 'output': '',
+ 'state': 'finished',
'builder': mock.ANY,
})
@@ -884,7 +900,8 @@ def test_container_already_exists(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'This is the return',
'exec_inspect.return_value': {'ExitCode': 0},
- })
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -899,7 +916,8 @@ def _inner():
self.assertRaises(BuildEnvironmentError, _inner)
self.assertEqual(
str(build_env.failure),
- 'A build environment is currently running for this version')
+ 'A build environment is currently running for this version',
+ )
self.assertEqual(self.mocks.docker_client.exec_create.call_count, 0)
self.assertTrue(build_env.failed)
@@ -938,7 +956,8 @@ def test_container_timeout(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'This is the return',
'exec_inspect.return_value': {'ExitCode': 0},
- })
+ },
+ )
build_env = DockerBuildEnvironment(
version=self.version,
@@ -970,13 +989,13 @@ def test_container_timeout(self):
'version': self.version.pk,
'success': False,
'project': self.project.pk,
- 'setup_error': u'',
+ 'setup_error': '',
'exit_code': 1,
'length': 0,
'error': 'Build exited due to time out',
- 'setup': u'',
- 'output': u'',
- 'state': u'finished',
+ 'setup': '',
+ 'output': '',
+ 'state': 'finished',
'builder': mock.ANY,
})
@@ -1040,8 +1059,10 @@ def test_error_output(self):
self.assertEqual(cmd.output, 'FOOBAR')
self.assertIsNone(cmd.error)
# Test non-combined streams
- cmd = BuildCommand(['/bin/bash', '-c', 'echo -n FOOBAR 1>&2'],
- combine_output=False)
+ cmd = BuildCommand(
+ ['/bin/bash', '-c', 'echo -n FOOBAR 1>&2'],
+ combine_output=False,
+ )
cmd.run()
self.assertEqual(cmd.output, '')
self.assertEqual(cmd.error, 'FOOBAR')
@@ -1068,7 +1089,8 @@ def test_unicode_output(self, mock_subprocess):
cmd.run()
self.assertEqual(
cmd.output,
- u'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9')
+ 'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9',
+ )
class TestDockerBuildCommand(TestCase):
@@ -1084,8 +1106,10 @@ def tearDown(self):
def test_wrapped_command(self):
"""Test shell wrapping for Docker chdir."""
- cmd = DockerBuildCommand(['pip', 'install', 'requests'],
- cwd='/tmp/foobar')
+ cmd = DockerBuildCommand(
+ ['pip', 'install', 'requests'],
+ cwd='/tmp/foobar',
+ )
self.assertEqual(
cmd.get_wrapped_command(),
"/bin/sh -c 'cd /tmp/foobar && pip install requests'",
@@ -1097,9 +1121,11 @@ def test_wrapped_command(self):
)
self.assertEqual(
cmd.get_wrapped_command(),
- ('/bin/sh -c '
- "'cd /tmp/foobar && PATH=/tmp/foo:$PATH "
- r"python /tmp/foo/pip install Django\>1.7'"),
+ (
+ '/bin/sh -c '
+ "'cd /tmp/foobar && PATH=/tmp/foo:$PATH "
+ r"python /tmp/foo/pip install Django\>1.7'"
+ ),
)
def test_unicode_output(self):
@@ -1109,7 +1135,8 @@ def test_unicode_output(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': SAMPLE_UTF8_BYTES,
'exec_inspect.return_value': {'ExitCode': 0},
- })
+ },
+ )
cmd = DockerBuildCommand(['echo', 'test'], cwd='/tmp/foobar')
cmd.build_env = Mock()
cmd.build_env.get_client.return_value = self.mocks.docker_client
@@ -1117,7 +1144,8 @@ def test_unicode_output(self):
cmd.run()
self.assertEqual(
cmd.output,
- u'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9')
+ 'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9',
+ )
self.assertEqual(self.mocks.docker_client.exec_start.call_count, 1)
self.assertEqual(self.mocks.docker_client.exec_create.call_count, 1)
self.assertEqual(self.mocks.docker_client.exec_inspect.call_count, 1)
@@ -1129,7 +1157,8 @@ def test_command_oom_kill(self):
'exec_create.return_value': {'Id': b'container-foobar'},
'exec_start.return_value': b'Killed\n',
'exec_inspect.return_value': {'ExitCode': 137},
- })
+ },
+ )
cmd = DockerBuildCommand(['echo', 'test'], cwd='/tmp/foobar')
cmd.build_env = Mock()
cmd.build_env.get_client.return_value = self.mocks.docker_client
@@ -1137,7 +1166,7 @@ def test_command_oom_kill(self):
cmd.run()
self.assertIn(
'Command killed due to excessive memory consumption\n',
- str(cmd.output)
+ str(cmd.output),
)
@@ -1212,7 +1241,7 @@ def test_install_core_requirements_mkdocs(self, checkout_path):
checkout_path.return_value = tmpdir
python_env = Virtualenv(
version=self.version_mkdocs,
- build_env=self.build_env_mock
+ build_env=self.build_env_mock,
)
python_env.install_core_requirements()
requirements_mkdocs = [
@@ -1243,15 +1272,15 @@ def test_install_user_requirements(self, checkout_path):
self.build_env_mock.version = self.version_sphinx
python_env = Virtualenv(
version=self.version_sphinx,
- build_env=self.build_env_mock
+ build_env=self.build_env_mock,
)
checkout_path = python_env.checkout_path
docs_requirements = os.path.join(
- checkout_path, 'docs', 'requirements.txt'
+ checkout_path, 'docs', 'requirements.txt',
)
root_requirements = os.path.join(
- checkout_path, 'requirements.txt'
+ checkout_path, 'requirements.txt',
)
paths = {
os.path.join(checkout_path, 'docs'): True,
@@ -1265,7 +1294,7 @@ def test_install_user_requirements(self, checkout_path):
'--cache-dir',
mock.ANY, # cache path
'-r',
- 'requirements_file'
+ 'requirements_file',
]
# One requirements file on the docs/ dir
@@ -1352,7 +1381,7 @@ def test_install_core_requirements_sphinx_conda(self, checkout_path):
self.build_env_mock.run.assert_has_calls([
mock.call(*args_conda, cwd=mock.ANY),
- mock.call(*args_pip, bin_path=mock.ANY, cwd=mock.ANY)
+ mock.call(*args_pip, bin_path=mock.ANY, cwd=mock.ANY),
])
@patch('readthedocs.projects.models.Project.checkout_path')
@@ -1393,7 +1422,7 @@ def test_install_core_requirements_mkdocs_conda(self, checkout_path):
self.build_env_mock.run.assert_has_calls([
mock.call(*args_conda, cwd=mock.ANY),
- mock.call(*args_pip, bin_path=mock.ANY, cwd=mock.ANY)
+ mock.call(*args_pip, bin_path=mock.ANY, cwd=mock.ANY),
])
@patch('readthedocs.projects.models.Project.checkout_path')
@@ -1408,7 +1437,7 @@ def test_install_user_requirements_conda(self, checkout_path):
self.build_env_mock.run.assert_not_called()
-class AutoWipeEnvironmentBase(object):
+class AutoWipeEnvironmentBase:
fixtures = ['test_data']
build_env_class = None
diff --git a/readthedocs/rtd_tests/tests/test_doc_serving.py b/readthedocs/rtd_tests/tests/test_doc_serving.py
index 56798ad8499..d3e5ba74989 100644
--- a/readthedocs/rtd_tests/tests/test_doc_serving.py
+++ b/readthedocs/rtd_tests/tests/test_doc_serving.py
@@ -1,27 +1,23 @@
# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals, division, print_function
-import mock
-from mock import patch, mock_open
import django_dynamic_fixture as fixture
-import pytest
-import six
-
+import mock
+from django.conf import settings
from django.contrib.auth.models import User
+from django.http import Http404
from django.test import TestCase
from django.test.utils import override_settings
-from django.http import Http404
-from django.conf import settings
from django.urls import reverse
+from mock import mock_open, patch
-from readthedocs.rtd_tests.base import RequestFactoryTestMixin
+from readthedocs.core.views.serve import _serve_symlink_docs
from readthedocs.projects import constants
from readthedocs.projects.models import Project
-from readthedocs.core.views.serve import _serve_symlink_docs
+from readthedocs.rtd_tests.base import RequestFactoryTestMixin
@override_settings(
- USE_SUBDOMAIN=False, PUBLIC_DOMAIN='public.readthedocs.org', DEBUG=False
+ USE_SUBDOMAIN=False, PUBLIC_DOMAIN='public.readthedocs.org', DEBUG=False,
)
class BaseDocServing(RequestFactoryTestMixin, TestCase):
@@ -50,7 +46,7 @@ def test_private_python_media_serving(self):
serve_mock.assert_called_with(
request,
'en/latest/usage.html',
- settings.SITE_ROOT + '/private_web_root/private'
+ settings.SITE_ROOT + '/private_web_root/private',
)
@override_settings(PYTHON_MEDIA=False)
@@ -60,7 +56,7 @@ def test_private_nginx_serving(self):
r = _serve_symlink_docs(request, project=self.private, filename='/en/latest/usage.html', privacy_level='private')
self.assertEqual(r.status_code, 200)
self.assertEqual(
- r._headers['x-accel-redirect'][1], '/private_web_root/private/en/latest/usage.html'
+ r._headers['x-accel-redirect'][1], '/private_web_root/private/en/latest/usage.html',
)
@override_settings(PYTHON_MEDIA=False)
@@ -70,7 +66,7 @@ def test_private_nginx_serving_unicode_filename(self):
r = _serve_symlink_docs(request, project=self.private, filename='/en/latest/úñíčódé.html', privacy_level='private')
self.assertEqual(r.status_code, 200)
self.assertEqual(
- r._headers['x-accel-redirect'][1], '/private_web_root/private/en/latest/%C3%BA%C3%B1%C3%AD%C4%8D%C3%B3d%C3%A9.html'
+ r._headers['x-accel-redirect'][1], '/private_web_root/private/en/latest/%C3%BA%C3%B1%C3%AD%C4%8D%C3%B3d%C3%A9.html',
)
@override_settings(PYTHON_MEDIA=False)
@@ -116,7 +112,7 @@ def test_public_python_media_serving(self):
serve_mock.assert_called_with(
request,
'en/latest/usage.html',
- settings.SITE_ROOT + '/public_web_root/public'
+ settings.SITE_ROOT + '/public_web_root/public',
)
@override_settings(PYTHON_MEDIA=False)
@@ -126,7 +122,7 @@ def test_public_nginx_serving(self):
r = _serve_symlink_docs(request, project=self.public, filename='/en/latest/usage.html', privacy_level='public')
self.assertEqual(r.status_code, 200)
self.assertEqual(
- r._headers['x-accel-redirect'][1], '/public_web_root/public/en/latest/usage.html'
+ r._headers['x-accel-redirect'][1], '/public_web_root/public/en/latest/usage.html',
)
@override_settings(PYTHON_MEDIA=False)
@@ -164,7 +160,6 @@ def test_default_robots_txt(self):
read_data='My own robots.txt',
)
@patch('readthedocs.core.views.serve.os')
- @pytest.mark.skipif(six.PY2, reason='In Python2 the mock is __builtins__.open')
def test_custom_robots_txt(self, os_mock, open_mock):
os_mock.path.exists.return_value = True
self.public.versions.update(active=True, built=True)
diff --git a/readthedocs/rtd_tests/tests/test_domains.py b/readthedocs/rtd_tests/tests/test_domains.py
index 5e487ce2c94..8c9610ac420 100644
--- a/readthedocs/rtd_tests/tests/test_domains.py
+++ b/readthedocs/rtd_tests/tests/test_domains.py
@@ -1,18 +1,16 @@
# -*- coding: utf-8 -*-
-from __future__ import absolute_import
import json
from django.core.cache import cache
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
-
from django_dynamic_fixture import get
from readthedocs.core.middleware import SubdomainMiddleware
-from readthedocs.projects.models import Project, Domain
from readthedocs.projects.forms import DomainForm
+from readthedocs.projects.models import Domain, Project
class MiddlewareTests(TestCase):
@@ -72,33 +70,45 @@ def setUp(self):
self.project = get(Project, slug='kong')
def test_https(self):
- """Make sure https is an admin-only attribute"""
- form = DomainForm({'domain': 'example.com', 'canonical': True},
- project=self.project)
+ """Make sure https is an admin-only attribute."""
+ form = DomainForm(
+ {'domain': 'example.com', 'canonical': True},
+ project=self.project,
+ )
self.assertTrue(form.is_valid())
domain = form.save()
self.assertFalse(domain.https)
- form = DomainForm({'domain': 'example.com', 'canonical': True,
- 'https': True},
- project=self.project)
+ form = DomainForm(
+ {
+ 'domain': 'example.com', 'canonical': True,
+ 'https': True,
+ },
+ project=self.project,
+ )
self.assertFalse(form.is_valid())
def test_canonical_change(self):
- """Make sure canonical can be properly changed"""
- form = DomainForm({'domain': 'example.com', 'canonical': True},
- project=self.project)
+ """Make sure canonical can be properly changed."""
+ form = DomainForm(
+ {'domain': 'example.com', 'canonical': True},
+ project=self.project,
+ )
self.assertTrue(form.is_valid())
domain = form.save()
self.assertEqual(domain.domain, 'example.com')
- form = DomainForm({'domain': 'example2.com', 'canonical': True},
- project=self.project)
+ form = DomainForm(
+ {'domain': 'example2.com', 'canonical': True},
+ project=self.project,
+ )
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['canonical'][0], 'Only 1 Domain can be canonical at a time.')
- form = DomainForm({'domain': 'example2.com', 'canonical': True},
- project=self.project,
- instance=domain)
+ form = DomainForm(
+ {'domain': 'example2.com', 'canonical': True},
+ project=self.project,
+ instance=domain,
+ )
self.assertTrue(form.is_valid())
domain = form.save()
self.assertEqual(domain.domain, 'example2.com')
diff --git a/readthedocs/rtd_tests/tests/test_extend.py b/readthedocs/rtd_tests/tests/test_extend.py
index 205fa64e7bc..74ddbcf8d1f 100644
--- a/readthedocs/rtd_tests/tests/test_extend.py
+++ b/readthedocs/rtd_tests/tests/test_extend.py
@@ -1,13 +1,14 @@
-from __future__ import absolute_import
-from builtins import object
+# -*- coding: utf-8 -*-
from django.test import TestCase, override_settings
-from readthedocs.core.utils.extend import (SettingsOverrideObject,
- get_override_class)
+from readthedocs.core.utils.extend import (
+ SettingsOverrideObject,
+ get_override_class,
+)
# Top level to ensure module name is correct
-class FooBase(object):
+class FooBase:
def bar(self):
return 1
@@ -34,7 +35,7 @@ class ExtendTests(TestCase):
@override_settings(FOO_OVERRIDE_CLASS=None)
def test_no_override(self):
- """Test class without override"""
+ """Test class without override."""
class Foo(SettingsOverrideObject):
_default_class = FooBase
_override_setting = 'FOO_OVERRIDE_CLASS'
@@ -49,7 +50,7 @@ class Foo(SettingsOverrideObject):
@override_settings(FOO_OVERRIDE_CLASS=EXTEND_OVERRIDE_PATH)
def test_with_basic_override(self):
- """Test class override setting defined"""
+ """Test class override setting defined."""
class Foo(SettingsOverrideObject):
_default_class = FooBase
_override_setting = 'FOO_OVERRIDE_CLASS'
@@ -62,10 +63,12 @@ class Foo(SettingsOverrideObject):
override_class = get_override_class(Foo, Foo._default_class)
self.assertEqual(override_class, NewFoo)
- @override_settings(FOO_OVERRIDE_CLASS=None,
- CLASS_OVERRIDES={
- EXTEND_PATH: EXTEND_OVERRIDE_PATH,
- })
+ @override_settings(
+ FOO_OVERRIDE_CLASS=None,
+ CLASS_OVERRIDES={
+ EXTEND_PATH: EXTEND_OVERRIDE_PATH,
+ },
+ )
def test_with_advanced_override(self):
"""Test class with override using `CLASS_OVERRIDES`"""
class Foo(SettingsOverrideObject):
@@ -80,10 +83,12 @@ class Foo(SettingsOverrideObject):
override_class = get_override_class(Foo, Foo._default_class)
self.assertEqual(override_class, NewFoo)
- @override_settings(FOO_OVERRIDE_CLASS=None,
- CLASS_OVERRIDES={
- EXTEND_PATH: EXTEND_OVERRIDE_PATH,
- })
+ @override_settings(
+ FOO_OVERRIDE_CLASS=None,
+ CLASS_OVERRIDES={
+ EXTEND_PATH: EXTEND_OVERRIDE_PATH,
+ },
+ )
def test_with_advanced_override_only(self):
"""Test class with no `override_setting`"""
class Foo(SettingsOverrideObject):
diff --git a/readthedocs/rtd_tests/tests/test_footer.py b/readthedocs/rtd_tests/tests/test_footer.py
index ec5ca4d754a..60f820a7574 100644
--- a/readthedocs/rtd_tests/tests/test_footer.py
+++ b/readthedocs/rtd_tests/tests/test_footer.py
@@ -1,11 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import mock
from django.test import TestCase
from rest_framework.test import APIRequestFactory, APITestCase
diff --git a/readthedocs/rtd_tests/tests/test_gold.py b/readthedocs/rtd_tests/tests/test_gold.py
index 63f50f8d696..b52ec2938ae 100644
--- a/readthedocs/rtd_tests/tests/test_gold.py
+++ b/readthedocs/rtd_tests/tests/test_gold.py
@@ -1,11 +1,9 @@
-from __future__ import absolute_import
-
-from django.urls import reverse
+# -*- coding: utf-8 -*-
from django.test import TestCase
+from django.urls import reverse
+from django_dynamic_fixture import fixture, get
-from django_dynamic_fixture import get, fixture
-
-from readthedocs.gold.models import GoldUser, LEVEL_CHOICES
+from readthedocs.gold.models import LEVEL_CHOICES, GoldUser
from readthedocs.projects.models import Project
from readthedocs.rtd_tests.utils import create_user
@@ -36,7 +34,7 @@ def test_too_many_projects(self):
self.assertEqual(resp.status_code, 302)
resp = self.client.post(reverse('gold_projects'), data={'project': self.project2.slug})
self.assertFormError(
- resp, form='form', field=None, errors='You already have the max number of supported projects.'
+ resp, form='form', field=None, errors='You already have the max number of supported projects.',
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(self.golduser.projects.count(), 1)
diff --git a/readthedocs/rtd_tests/tests/test_imported_file.py b/readthedocs/rtd_tests/tests/test_imported_file.py
index 81f5e2a684b..187b1af9830 100644
--- a/readthedocs/rtd_tests/tests/test_imported_file.py
+++ b/readthedocs/rtd_tests/tests/test_imported_file.py
@@ -1,15 +1,17 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
import os
+
from django.test import TestCase
+from readthedocs.projects.models import ImportedFile, Project
from readthedocs.projects.tasks import _manage_imported_files
-from readthedocs.projects.models import Project, ImportedFile
+
base_dir = os.path.dirname(os.path.dirname(__file__))
class ImportedFileTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
self.project = Project.objects.get(slug='pip')
diff --git a/readthedocs/rtd_tests/tests/test_integrations.py b/readthedocs/rtd_tests/tests/test_integrations.py
index 51006b7e757..54e6922a4c6 100644
--- a/readthedocs/rtd_tests/tests/test_integrations.py
+++ b/readthedocs/rtd_tests/tests/test_integrations.py
@@ -1,22 +1,20 @@
-from __future__ import absolute_import
-
-from builtins import range
+# -*- coding: utf-8 -*-
import django_dynamic_fixture as fixture
-from django.test import TestCase, RequestFactory
-from django.contrib.contenttypes.models import ContentType
+from django.test import TestCase
from rest_framework.test import APIClient
-from rest_framework.test import APIRequestFactory
-from rest_framework.response import Response
from readthedocs.integrations.models import (
- HttpExchange, Integration, GitHubWebhook
+ GitHubWebhook,
+ HttpExchange,
+ Integration,
)
from readthedocs.projects.models import Project
class HttpExchangeTests(TestCase):
- """Test HttpExchange model by using existing views
+ """
+ Test HttpExchange model by using existing views.
This doesn't mock out a req/resp cycle, as manually creating these outside
views misses a number of attributes on the request object.
@@ -26,23 +24,27 @@ def test_exchange_json_request_body(self):
client = APIClient()
client.login(username='super', password='test')
project = fixture.get(Project, main_language_project=None)
- integration = fixture.get(Integration, project=project,
- integration_type=Integration.GITHUB_WEBHOOK,
- provider_data='')
+ integration = fixture.get(
+ Integration, project=project,
+ integration_type=Integration.GITHUB_WEBHOOK,
+ provider_data='',
+ )
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(project.slug),
+ '/api/v2/webhook/github/{}/'.format(project.slug),
{'ref': 'exchange_json'},
- format='json'
+ format='json',
)
exchange = HttpExchange.objects.get(integrations=integration)
self.assertEqual(
exchange.request_body,
- '{"ref": "exchange_json"}'
+ '{"ref": "exchange_json"}',
)
self.assertEqual(
exchange.request_headers,
- {u'Content-Type': u'application/json; charset=None',
- u'Cookie': u''}
+ {
+ 'Content-Type': 'application/json; charset=None',
+ 'Cookie': '',
+ },
)
self.assertEqual(
exchange.response_body,
@@ -51,31 +53,37 @@ def test_exchange_json_request_body(self):
)
self.assertEqual(
exchange.response_headers,
- {u'Allow': u'POST, OPTIONS',
- u'Content-Type': u'text/html; charset=utf-8'}
+ {
+ 'Allow': 'POST, OPTIONS',
+ 'Content-Type': 'text/html; charset=utf-8',
+ },
)
def test_exchange_form_request_body(self):
client = APIClient()
client.login(username='super', password='test')
project = fixture.get(Project, main_language_project=None)
- integration = fixture.get(Integration, project=project,
- integration_type=Integration.GITHUB_WEBHOOK,
- provider_data='')
+ integration = fixture.get(
+ Integration, project=project,
+ integration_type=Integration.GITHUB_WEBHOOK,
+ provider_data='',
+ )
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(project.slug),
+ '/api/v2/webhook/github/{}/'.format(project.slug),
'payload=%7B%22ref%22%3A+%22exchange_form%22%7D',
content_type='application/x-www-form-urlencoded',
)
exchange = HttpExchange.objects.get(integrations=integration)
self.assertEqual(
exchange.request_body,
- '{"ref": "exchange_form"}'
+ '{"ref": "exchange_form"}',
)
self.assertEqual(
exchange.request_headers,
- {u'Content-Type': u'application/x-www-form-urlencoded',
- u'Cookie': u''}
+ {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Cookie': '',
+ },
)
self.assertEqual(
exchange.response_body,
@@ -84,57 +92,63 @@ def test_exchange_form_request_body(self):
)
self.assertEqual(
exchange.response_headers,
- {u'Allow': u'POST, OPTIONS',
- u'Content-Type': u'text/html; charset=utf-8'}
+ {
+ 'Allow': 'POST, OPTIONS',
+ 'Content-Type': 'text/html; charset=utf-8',
+ },
)
def test_extraneous_exchanges_deleted_in_correct_order(self):
client = APIClient()
client.login(username='super', password='test')
project = fixture.get(Project, main_language_project=None)
- integration = fixture.get(Integration, project=project,
- integration_type=Integration.GITHUB_WEBHOOK,
- provider_data='')
+ integration = fixture.get(
+ Integration, project=project,
+ integration_type=Integration.GITHUB_WEBHOOK,
+ provider_data='',
+ )
self.assertEqual(
HttpExchange.objects.filter(integrations=integration).count(),
- 0
+ 0,
)
for _ in range(10):
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(project.slug),
+ '/api/v2/webhook/github/{}/'.format(project.slug),
{'ref': 'deleted'},
- format='json'
+ format='json',
)
for _ in range(10):
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(project.slug),
+ '/api/v2/webhook/github/{}/'.format(project.slug),
{'ref': 'preserved'},
- format='json'
+ format='json',
)
self.assertEqual(
HttpExchange.objects.filter(integrations=integration).count(),
- 10
+ 10,
)
self.assertEqual(
HttpExchange.objects.filter(
integrations=integration,
request_body='{"ref": "preserved"}',
).count(),
- 10
+ 10,
)
def test_request_headers_are_removed(self):
client = APIClient()
client.login(username='super', password='test')
project = fixture.get(Project, main_language_project=None)
- integration = fixture.get(Integration, project=project,
- integration_type=Integration.GITHUB_WEBHOOK,
- provider_data='')
+ integration = fixture.get(
+ Integration, project=project,
+ integration_type=Integration.GITHUB_WEBHOOK,
+ provider_data='',
+ )
resp = client.post(
- '/api/v2/webhook/github/{0}/'.format(project.slug),
+ '/api/v2/webhook/github/{}/'.format(project.slug),
{'ref': 'exchange_json'},
format='json',
HTTP_X_FORWARDED_FOR='1.2.3.4',
@@ -144,9 +158,11 @@ def test_request_headers_are_removed(self):
exchange = HttpExchange.objects.get(integrations=integration)
self.assertEqual(
exchange.request_headers,
- {u'Content-Type': u'application/json; charset=None',
- u'Cookie': u'',
- u'X-Foo': u'bar'}
+ {
+ 'Content-Type': 'application/json; charset=None',
+ 'Cookie': '',
+ 'X-Foo': 'bar',
+ },
)
@@ -156,7 +172,7 @@ def test_subclass_is_replaced_on_get(self):
project = fixture.get(Project, main_language_project=None)
integration = Integration.objects.create(
project=project,
- integration_type=Integration.GITHUB_WEBHOOK
+ integration_type=Integration.GITHUB_WEBHOOK,
)
integration = Integration.objects.get(pk=integration.pk)
self.assertIsInstance(integration, GitHubWebhook)
@@ -165,7 +181,7 @@ def test_subclass_is_replaced_on_subclass(self):
project = fixture.get(Project, main_language_project=None)
integration = Integration.objects.create(
project=project,
- integration_type=Integration.GITHUB_WEBHOOK
+ integration_type=Integration.GITHUB_WEBHOOK,
)
integration = Integration.objects.subclass(integration)
self.assertIsInstance(integration, GitHubWebhook)
diff --git a/readthedocs/rtd_tests/tests/test_middleware.py b/readthedocs/rtd_tests/tests/test_middleware.py
index 0bfe9b56304..4a93274da4a 100644
--- a/readthedocs/rtd_tests/tests/test_middleware.py
+++ b/readthedocs/rtd_tests/tests/test_middleware.py
@@ -1,23 +1,18 @@
# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-
-from django.http import Http404
+from corsheaders.middleware import CorsMiddleware
from django.conf import settings
from django.core.cache import cache
-from django.urls.base import get_urlconf, set_urlconf
+from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
-
+from django.urls.base import get_urlconf, set_urlconf
from django_dynamic_fixture import get
-
-from corsheaders.middleware import CorsMiddleware
from mock import patch
from readthedocs.core.middleware import SubdomainMiddleware
-from readthedocs.projects.models import Project, ProjectRelationship, Domain
-
+from readthedocs.projects.models import Domain, Project, ProjectRelationship
from readthedocs.rtd_tests.utils import create_user
@@ -152,7 +147,7 @@ def setUp(self):
self.project = get(
Project, slug='pip',
users=[self.owner], privacy_level='public',
- mail_language_project=None
+ mail_language_project=None,
)
self.subproject = get(
Project,
@@ -163,7 +158,7 @@ def setUp(self):
self.relationship = get(
ProjectRelationship,
parent=self.project,
- child=self.subproject
+ child=self.subproject,
)
self.domain = get(Domain, domain='my.valid.domain', project=self.project)
diff --git a/readthedocs/rtd_tests/tests/test_notifications.py b/readthedocs/rtd_tests/tests/test_notifications.py
index 1fa16323df4..1ead82e534a 100644
--- a/readthedocs/rtd_tests/tests/test_notifications.py
+++ b/readthedocs/rtd_tests/tests/test_notifications.py
@@ -1,26 +1,28 @@
# -*- coding: utf-8 -*-
-"""Notification tests"""
+"""Notification tests."""
+
-from __future__ import absolute_import
-from datetime import timedelta
-import mock
import django_dynamic_fixture as fixture
+import mock
+from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django.test import TestCase
from django.test.utils import override_settings
-from django.contrib.auth.models import User, AnonymousUser
-from django.utils import timezone
from messages_extends.models import Message as PersistentMessage
+from readthedocs.builds.models import Build
from readthedocs.notifications import Notification, SiteNotification
from readthedocs.notifications.backends import EmailBackend, SiteBackend
-from readthedocs.notifications.constants import ERROR, INFO_NON_PERSISTENT, WARNING_NON_PERSISTENT
+from readthedocs.notifications.constants import (
+ ERROR,
+ INFO_NON_PERSISTENT,
+ WARNING_NON_PERSISTENT,
+)
from readthedocs.projects.models import Project
from readthedocs.projects.notifications import (
- DeprecatedGitHubWebhookNotification,
DeprecatedBuildWebhookNotification,
+ DeprecatedGitHubWebhookNotification,
)
-from readthedocs.builds.models import Build
@override_settings(
@@ -46,21 +48,33 @@ class TestNotification(Notification):
req = mock.MagicMock()
notify = TestNotification(context_object=build, request=req)
- self.assertEqual(notify.get_template_names('email'),
- ['builds/notifications/foo_email.html'])
- self.assertEqual(notify.get_template_names('site'),
- ['builds/notifications/foo_site.html'])
- self.assertEqual(notify.get_subject(),
- 'This is {0}'.format(build.id))
- self.assertEqual(notify.get_context_data(),
- {'foo': build,
- 'production_uri': 'https://readthedocs.org',
- 'request': req})
+ self.assertEqual(
+ notify.get_template_names('email'),
+ ['builds/notifications/foo_email.html'],
+ )
+ self.assertEqual(
+ notify.get_template_names('site'),
+ ['builds/notifications/foo_site.html'],
+ )
+ self.assertEqual(
+ notify.get_subject(),
+ 'This is {}'.format(build.id),
+ )
+ self.assertEqual(
+ notify.get_context_data(),
+ {
+ 'foo': build,
+ 'production_uri': 'https://readthedocs.org',
+ 'request': req,
+ },
+ )
notify.render('site')
render_to_string.assert_has_calls([
- mock.call(context=mock.ANY,
- template_name=['builds/notifications/foo_site.html'])
+ mock.call(
+ context=mock.ANY,
+ template_name=['builds/notifications/foo_site.html'],
+ ),
])
@@ -90,10 +104,10 @@ class TestNotification(Notification):
request=mock.ANY,
template='core/email/common.txt',
context={'content': 'Test'},
- subject=u'This is {}'.format(build.id),
+ subject='This is {}'.format(build.id),
template_html='core/email/common.html',
recipient=user.email,
- )
+ ),
])
def test_message_backend(self, render_to_string):
@@ -118,7 +132,7 @@ class TestNotification(Notification):
self.assertEqual(message.user, user)
def test_message_anonymous_user(self, render_to_string):
- """Anonymous user still throwns exception on persistent messages"""
+ """Anonymous user still throwns exception on persistent messages."""
render_to_string.return_value = 'Test'
class TestNotification(Notification):
diff --git a/readthedocs/rtd_tests/tests/test_oauth.py b/readthedocs/rtd_tests/tests/test_oauth.py
index fedbaf0d47d..1ef675cc42c 100644
--- a/readthedocs/rtd_tests/tests/test_oauth.py
+++ b/readthedocs/rtd_tests/tests/test_oauth.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import mock
from django.conf import settings
from django.contrib.auth.models import User
@@ -10,7 +7,10 @@
from readthedocs.oauth.models import RemoteOrganization, RemoteRepository
from readthedocs.oauth.services import (
- BitbucketService, GitHubService, GitLabService)
+ BitbucketService,
+ GitHubService,
+ GitLabService,
+)
from readthedocs.projects import constants
from readthedocs.projects.models import Project
@@ -39,7 +39,8 @@ def test_make_project_pass(self):
'clone_url': 'https://github.com/testuser/testrepo.git',
}
repo = self.service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
self.assertIsInstance(repo, RemoteRepository)
self.assertEqual(repo.name, 'testrepo')
self.assertEqual(repo.full_name, 'testuser/testrepo')
@@ -51,9 +52,11 @@ def test_make_project_pass(self):
self.assertIn(self.user, repo.users.all())
self.assertEqual(repo.organization, self.org)
self.assertEqual(
- repo.clone_url, 'https://github.com/testuser/testrepo.git')
+ repo.clone_url, 'https://github.com/testuser/testrepo.git',
+ )
self.assertEqual(
- repo.ssh_url, 'ssh://git@github.com:testuser/testrepo.git')
+ repo.ssh_url, 'ssh://git@github.com:testuser/testrepo.git',
+ )
self.assertEqual(repo.html_url, 'https://github.com/testuser/testrepo')
def test_make_project_fail(self):
@@ -68,7 +71,8 @@ def test_make_project_fail(self):
'clone_url': '',
}
github_project = self.service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
self.assertIsNone(github_project)
def test_make_organization(self):
@@ -105,29 +109,35 @@ def test_multiple_users_same_repo(self):
}
github_project = self.service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
user2 = User.objects.get(pk=2)
service = GitHubService(user=user2, account=None)
github_project_2 = service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
self.assertIsInstance(github_project, RemoteRepository)
self.assertIsInstance(github_project_2, RemoteRepository)
self.assertNotEqual(github_project_2, github_project)
github_project_3 = self.service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
github_project_4 = service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
self.assertIsInstance(github_project_3, RemoteRepository)
self.assertIsInstance(github_project_4, RemoteRepository)
self.assertEqual(github_project, github_project_3)
self.assertEqual(github_project_2, github_project_4)
github_project_5 = self.service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
github_project_6 = service.create_repository(
- repo_json, organization=self.org, privacy=self.privacy)
+ repo_json, organization=self.org, privacy=self.privacy,
+ )
self.assertEqual(github_project, github_project_5)
self.assertEqual(github_project_2, github_project_6)
@@ -261,7 +271,8 @@ def setUp(self):
def test_make_project_pass(self):
repo = self.service.create_repository(
self.repo_response_data, organization=self.org,
- privacy=self.privacy)
+ privacy=self.privacy,
+ )
self.assertIsInstance(repo, RemoteRepository)
self.assertEqual(repo.name, 'tutorials.bitbucket.org')
self.assertEqual(repo.full_name, 'tutorials/tutorials.bitbucket.org')
@@ -269,24 +280,30 @@ def test_make_project_pass(self):
self.assertEqual(
repo.avatar_url, (
'https://bitbucket-assetroot.s3.amazonaws.com/c/photos/2012/Nov/28/'
- 'tutorials.bitbucket.org-logo-1456883302-9_avatar.png'))
+ 'tutorials.bitbucket.org-logo-1456883302-9_avatar.png'
+ ),
+ )
self.assertIn(self.user, repo.users.all())
self.assertEqual(repo.organization, self.org)
self.assertEqual(
repo.clone_url,
- 'https://bitbucket.org/tutorials/tutorials.bitbucket.org')
+ 'https://bitbucket.org/tutorials/tutorials.bitbucket.org',
+ )
self.assertEqual(
repo.ssh_url,
- 'ssh://hg@bitbucket.org/tutorials/tutorials.bitbucket.org')
+ 'ssh://hg@bitbucket.org/tutorials/tutorials.bitbucket.org',
+ )
self.assertEqual(
repo.html_url,
- 'https://bitbucket.org/tutorials/tutorials.bitbucket.org')
+ 'https://bitbucket.org/tutorials/tutorials.bitbucket.org',
+ )
def test_make_project_fail(self):
data = self.repo_response_data.copy()
data['is_private'] = True
repo = self.service.create_repository(
- data, organization=self.org, privacy=self.privacy)
+ data, organization=self.org, privacy=self.privacy,
+ )
self.assertIsNone(repo)
@override_settings(DEFAULT_PRIVACY_LEVEL='private')
@@ -307,7 +324,9 @@ def test_make_organization(self):
self.assertEqual(
org.avatar_url, (
'https://bitbucket-assetroot.s3.amazonaws.com/c/photos/2014/Sep/24/'
- 'teamsinspace-avatar-3731530358-7_avatar.png'))
+ 'teamsinspace-avatar-3731530358-7_avatar.png'
+ ),
+ )
self.assertEqual(org.url, 'https://bitbucket.org/teamsinspace')
def test_import_with_no_token(self):
@@ -430,7 +449,8 @@ def test_make_project_pass(self):
m.return_value = True
repo = self.service.create_repository(
self.repo_response_data, organization=self.org,
- privacy=self.privacy)
+ privacy=self.privacy,
+ )
self.assertIsInstance(repo, RemoteRepository)
self.assertEqual(repo.name, 'testrepo')
self.assertEqual(repo.full_name, 'testorga / testrepo')
@@ -453,13 +473,15 @@ def test_make_project_pass(self):
def test_make_private_project_fail(self):
repo = self.service.create_repository(
self.get_private_repo_data(), organization=self.org,
- privacy=self.privacy)
+ privacy=self.privacy,
+ )
self.assertIsNone(repo)
def test_make_private_project_success(self):
repo = self.service.create_repository(
self.get_private_repo_data(), organization=self.org,
- privacy=constants.PRIVATE)
+ privacy=constants.PRIVATE,
+ )
self.assertIsInstance(repo, RemoteRepository)
self.assertTrue(repo.private, True)
diff --git a/readthedocs/rtd_tests/tests/test_post_commit_hooks.py b/readthedocs/rtd_tests/tests/test_post_commit_hooks.py
index 86297dbe4b4..4dd40865e02 100644
--- a/readthedocs/rtd_tests/tests/test_post_commit_hooks.py
+++ b/readthedocs/rtd_tests/tests/test_post_commit_hooks.py
@@ -1,15 +1,14 @@
-from __future__ import absolute_import
-
+# -*- coding: utf-8 -*-
import json
import logging
+from urllib.parse import urlencode
import mock
from django.test import TestCase
from django_dynamic_fixture import get
-from future.backports.urllib.parse import urlencode
from readthedocs.builds.models import Version
-from readthedocs.projects.models import Project, Feature
+from readthedocs.projects.models import Feature, Project
log = logging.getLogger(__name__)
@@ -18,15 +17,19 @@
class BasePostCommitTest(TestCase):
def _setup(self):
self.rtfd = get(
- Project, repo='https://github.com/rtfd/readthedocs.org', slug='read-the-docs')
+ Project, repo='https://github.com/rtfd/readthedocs.org', slug='read-the-docs',
+ )
self.rtfd_not_ok = get(
- Version, project=self.rtfd, slug='not_ok', identifier='not_ok', active=False)
+ Version, project=self.rtfd, slug='not_ok', identifier='not_ok', active=False,
+ )
self.rtfd_awesome = get(
- Version, project=self.rtfd, slug='awesome', identifier='awesome', active=True)
+ Version, project=self.rtfd, slug='awesome', identifier='awesome', active=True,
+ )
self.pip = get(Project, repo='https://bitbucket.org/pip/pip', repo_type='hg')
self.pip_not_ok = get(
- Version, project=self.pip, slug='not_ok', identifier='not_ok', active=False)
+ Version, project=self.pip, slug='not_ok', identifier='not_ok', active=False,
+ )
self.sphinx = get(Project, repo='https://bitbucket.org/sphinx/sphinx', repo_type='git')
self.mocks = [mock.patch('readthedocs.core.views.hooks.trigger_build')]
@@ -48,91 +51,97 @@ def _setup(self):
class GitLabWebHookTest(BasePostCommitTest):
- fixtures = ["eric"]
+ fixtures = ['eric']
def setUp(self):
self._setup()
self.payload = {
- "object_kind": "push",
- "before": "95790bf891e76fee5e1747ab589903a6a1f80f22",
- "after": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "ref": "refs/heads/awesome",
- "checkout_sha": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "user_id": 4,
- "user_name": "John Smith",
- "user_email": "john@example.com",
- "project_id": 15,
- "project":{
- "name":"readthedocs",
- "description":"",
- "web_url":"http://example.com/mike/diaspora",
- "avatar_url": None,
- "git_ssh_url":"git@github.com:rtfd/readthedocs.org.git",
- "git_http_url":"http://github.com/rtfd/readthedocs.org.git",
- "namespace":"Mike",
- "visibility_level":0,
- "path_with_namespace":"mike/diaspora",
- "default_branch":"master",
- "homepage":"http://example.com/mike/diaspora",
- "url":"git@github.com/rtfd/readthedocs.org.git",
- "ssh_url":"git@github.com/rtfd/readthedocs.org.git",
- "http_url":"http://github.com/rtfd/readthedocs.org.git"
+ 'object_kind': 'push',
+ 'before': '95790bf891e76fee5e1747ab589903a6a1f80f22',
+ 'after': 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7',
+ 'ref': 'refs/heads/awesome',
+ 'checkout_sha': 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7',
+ 'user_id': 4,
+ 'user_name': 'John Smith',
+ 'user_email': 'john@example.com',
+ 'project_id': 15,
+ 'project': {
+ 'name': 'readthedocs',
+ 'description': '',
+ 'web_url': 'http://example.com/mike/diaspora',
+ 'avatar_url': None,
+ 'git_ssh_url': 'git@github.com:rtfd/readthedocs.org.git',
+ 'git_http_url': 'http://github.com/rtfd/readthedocs.org.git',
+ 'namespace': 'Mike',
+ 'visibility_level': 0,
+ 'path_with_namespace': 'mike/diaspora',
+ 'default_branch': 'master',
+ 'homepage': 'http://example.com/mike/diaspora',
+ 'url': 'git@github.com/rtfd/readthedocs.org.git',
+ 'ssh_url': 'git@github.com/rtfd/readthedocs.org.git',
+ 'http_url': 'http://github.com/rtfd/readthedocs.org.git',
},
- "repository":{
- "name": "Diaspora",
- "url": "git@github.com:rtfd/readthedocs.org.git",
- "description": "",
- "homepage": "http://github.com/rtfd/readthedocs.org",
- "git_http_url": "http://github.com/rtfd/readthedocs.org.git",
- "git_ssh_url": "git@github.com:rtfd/readthedocs.org.git",
- "visibility_level": 0
+ 'repository': {
+ 'name': 'Diaspora',
+ 'url': 'git@github.com:rtfd/readthedocs.org.git',
+ 'description': '',
+ 'homepage': 'http://github.com/rtfd/readthedocs.org',
+ 'git_http_url': 'http://github.com/rtfd/readthedocs.org.git',
+ 'git_ssh_url': 'git@github.com:rtfd/readthedocs.org.git',
+ 'visibility_level': 0,
},
- "commits": [
+ 'commits': [
{
- "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
- "message": "Update Catalan translation to e38cb41.",
- "timestamp": "2011-12-12T14:27:31+02:00",
- "url": "http://example.com/mike/diaspora/commit/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
- "author": {
- "name": "Jordi Mallach",
- "email": "jordi@softcatala.org"
+ 'id': 'b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327',
+ 'message': 'Update Catalan translation to e38cb41.',
+ 'timestamp': '2011-12-12T14:27:31+02:00',
+ 'url': 'http://example.com/mike/diaspora/commit/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327',
+ 'author': {
+ 'name': 'Jordi Mallach',
+ 'email': 'jordi@softcatala.org',
},
- "added": ["CHANGELOG"],
- "modified": ["app/controller/application.rb"],
- "removed": []
+ 'added': ['CHANGELOG'],
+ 'modified': ['app/controller/application.rb'],
+ 'removed': [],
},
{
- "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "message": "fixed readme",
- "timestamp": "2012-01-03T23:36:29+02:00",
- "url": "http://example.com/mike/diaspora/commit/da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "author": {
- "name": "GitLab dev user",
- "email": "gitlabdev@dv6700.(none)"
+ 'id': 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7',
+ 'message': 'fixed readme',
+ 'timestamp': '2012-01-03T23:36:29+02:00',
+ 'url': 'http://example.com/mike/diaspora/commit/da1560886d4f094c3e6c9ef40349f7d38b5d27d7',
+ 'author': {
+ 'name': 'GitLab dev user',
+ 'email': 'gitlabdev@dv6700.(none)',
},
- "added": ["CHANGELOG"],
- "modified": ["app/controller/application.rb"],
- "removed": []
- }
+ 'added': ['CHANGELOG'],
+ 'modified': ['app/controller/application.rb'],
+ 'removed': [],
+ },
],
- "total_commits_count": 4
+ 'total_commits_count': 4,
}
def test_gitlab_post_commit_hook_builds_branch_docs_if_it_should(self):
- """GitLab webhook should only build active versions"""
- r = self.client.post('/gitlab/', data=json.dumps(self.payload),
- content_type='application/json')
+ """GitLab webhook should only build active versions."""
+ r = self.client.post(
+ '/gitlab/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: github.com/rtfd/readthedocs.org [awesome]')
self.payload['ref'] = 'refs/heads/not_ok'
- r = self.client.post('/gitlab/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/gitlab/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Not Building: github.com/rtfd/readthedocs.org [not_ok]')
self.payload['ref'] = 'refs/heads/unknown'
- r = self.client.post('/gitlab/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/gitlab/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) No known branches were pushed to.')
def test_gitlab_post_commit_knows_default_branches(self):
@@ -146,8 +155,10 @@ def test_gitlab_post_commit_knows_default_branches(self):
rtd.save()
self.payload['ref'] = 'refs/heads/master'
- r = self.client.post('/gitlab/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/gitlab/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: github.com/rtfd/readthedocs.org [latest]')
rtd.default_branch = old_default
@@ -161,7 +172,7 @@ def test_gitlab_request_empty_url(self):
self.payload['project']['http_url'] = ''
r = self.client.post(
'/gitlab/', data=json.dumps(self.payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 404)
@@ -178,110 +189,123 @@ def test_gitlab_webhook_is_deprecated(self):
r = self.client.post(
'/gitlab/',
data=json.dumps(payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 403)
class GitHubWebHookTest(BasePostCommitTest):
- fixtures = ["eric"]
+ fixtures = ['eric']
def setUp(self):
self._setup()
self.payload = {
- "after": "5ad757394b926e5637ffeafe340f952ef48bd270",
- "base_ref": "refs/heads/master",
- "before": "5b4e453dc913b08642b1d4fb10ed23c9d6e5b129",
- "commits": [
+ 'after': '5ad757394b926e5637ffeafe340f952ef48bd270',
+ 'base_ref': 'refs/heads/master',
+ 'before': '5b4e453dc913b08642b1d4fb10ed23c9d6e5b129',
+ 'commits': [
{
- "added": [],
- "author": {
- "email": "eric@ericholscher.com",
- "name": "Eric Holscher",
- "username": "ericholscher"
+ 'added': [],
+ 'author': {
+ 'email': 'eric@ericholscher.com',
+ 'name': 'Eric Holscher',
+ 'username': 'ericholscher',
},
- "distinct": False,
- "id": "11f229c6a78f5bc8cb173104a3f7a68cdb7eb15a",
- "message": "Fix it on the front list as well.",
- "modified": [
- "readthedocs/templates/core/project_list_detailed.html"
+ 'distinct': False,
+ 'id': '11f229c6a78f5bc8cb173104a3f7a68cdb7eb15a',
+ 'message': 'Fix it on the front list as well.',
+ 'modified': [
+ 'readthedocs/templates/core/project_list_detailed.html',
],
- "removed": [],
- "timestamp": "2011-09-12T19:38:55-07:00",
- "url": ("https://github.com/wraithan/readthedocs.org/"
- "commit/11f229c6a78f5bc8cb173104a3f7a68cdb7eb15a")
+ 'removed': [],
+ 'timestamp': '2011-09-12T19:38:55-07:00',
+ 'url': (
+ 'https://github.com/wraithan/readthedocs.org/'
+ 'commit/11f229c6a78f5bc8cb173104a3f7a68cdb7eb15a'
+ ),
},
],
- "compare": ("https://github.com/wraithan/readthedocs.org/compare/"
- "5b4e453...5ad7573"),
- "created": False,
- "deleted": False,
- "forced": False,
- "pusher": {
- "name": "none"
+ 'compare': (
+ 'https://github.com/wraithan/readthedocs.org/compare/'
+ '5b4e453...5ad7573'
+ ),
+ 'created': False,
+ 'deleted': False,
+ 'forced': False,
+ 'pusher': {
+ 'name': 'none',
},
- "ref": "refs/heads/awesome",
- "repository": {
- "created_at": "2011/09/09 14:20:13 -0700",
- "description": "source code to readthedocs.org",
- "fork": True,
- "forks": 0,
- "has_downloads": True,
- "has_issues": False,
- "has_wiki": True,
- "homepage": "http://rtfd.org/",
- "language": "Python",
- "name": "readthedocs.org",
- "open_issues": 0,
- "owner": {
- "email": "XWraithanX@gmail.com",
- "name": "wraithan"
+ 'ref': 'refs/heads/awesome',
+ 'repository': {
+ 'created_at': '2011/09/09 14:20:13 -0700',
+ 'description': 'source code to readthedocs.org',
+ 'fork': True,
+ 'forks': 0,
+ 'has_downloads': True,
+ 'has_issues': False,
+ 'has_wiki': True,
+ 'homepage': 'http://rtfd.org/',
+ 'language': 'Python',
+ 'name': 'readthedocs.org',
+ 'open_issues': 0,
+ 'owner': {
+ 'email': 'XWraithanX@gmail.com',
+ 'name': 'wraithan',
},
- "private": False,
- "pushed_at": "2011/09/12 22:33:34 -0700",
- "size": 140,
- "url": "https://github.com/rtfd/readthedocs.org",
- "ssh_url": "git@github.com:rtfd/readthedocs.org.git",
- "watchers": 1
-
- }
+ 'private': False,
+ 'pushed_at': '2011/09/12 22:33:34 -0700',
+ 'size': 140,
+ 'url': 'https://github.com/rtfd/readthedocs.org',
+ 'ssh_url': 'git@github.com:rtfd/readthedocs.org.git',
+ 'watchers': 1,
+
+ },
}
def test_post_types(self):
- """Ensure various POST formats"""
- r = self.client.post('/github/',
- data=json.dumps(self.payload),
- content_type='application/json')
+ """Ensure various POST formats."""
+ r = self.client.post(
+ '/github/',
+ data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertEqual(r.status_code, 200)
- r = self.client.post('/github/',
- data=urlencode({'payload': json.dumps(self.payload)}),
- content_type='application/x-www-form-urlencoded')
+ r = self.client.post(
+ '/github/',
+ data=urlencode({'payload': json.dumps(self.payload)}),
+ content_type='application/x-www-form-urlencoded',
+ )
self.assertEqual(r.status_code, 200)
def test_github_upper_case_repo(self):
"""
Test the github post commit hook will build properly with upper case
repository.
+
This allows for capitalization differences in post-commit hook URL's.
"""
payload = self.payload.copy()
payload['repository']['url'] = payload['repository']['url'].upper()
- r = self.client.post('/github/', data=json.dumps(payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: HTTPS://GITHUB.COM/RTFD/READTHEDOCS.ORG [awesome]')
self.payload['ref'] = 'refs/heads/not_ok'
def test_400_on_no_ref(self):
"""
GitHub sometimes sends us a post-commit hook without a ref.
- This means we don't know what branch to build,
- so return a 400.
+
+ This means we don't know what branch to build, so return a 400.
"""
payload = self.payload.copy()
del payload['ref']
- r = self.client.post('/github/', data=json.dumps(payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(payload),
+ content_type='application/json',
+ )
self.assertEqual(r.status_code, 400)
def test_github_request_empty_url(self):
@@ -293,7 +317,7 @@ def test_github_request_empty_url(self):
self.payload['repository']['ssh_url'] = ''
r = self.client.post(
'/github/', data=json.dumps(self.payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 403)
@@ -301,16 +325,17 @@ def test_private_repo_mapping(self):
"""
Test for private GitHub repo mapping.
- Previously we were missing triggering post-commit hooks because
- we only compared against the *public* ``github.com/user/repo`` URL.
- Users can also enter a ``github.com:user/repo`` URL,
- which we should support.
+ Previously we failed to trigger post-commit hooks because we only
+ compared against the *public* ``github.com/user/repo`` URL. Users can
+ also enter a ``github.com:user/repo`` URL, which we should support.
"""
self.rtfd.repo = 'git@github.com:rtfd/readthedocs.org'
self.rtfd.save()
payload = self.payload.copy()
- r = self.client.post('/github/', data=json.dumps(payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: github.com/rtfd/readthedocs.org [awesome]')
def test_github_post_commit_hook_builds_branch_docs_if_it_should(self):
@@ -319,18 +344,24 @@ def test_github_post_commit_hook_builds_branch_docs_if_it_should(self):
versions that are set to be built if the branch they refer to
is updated. Otherwise it is no op.
"""
- r = self.client.post('/github/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: github.com/rtfd/readthedocs.org [awesome]')
self.payload['ref'] = 'refs/heads/not_ok'
- r = self.client.post('/github/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Not Building: github.com/rtfd/readthedocs.org [not_ok]')
self.payload['ref'] = 'refs/heads/unknown'
- r = self.client.post('/github/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) No known branches were pushed to.')
def test_github_post_commit_knows_default_branches(self):
@@ -344,8 +375,10 @@ def test_github_post_commit_knows_default_branches(self):
rtd.save()
self.payload['ref'] = 'refs/heads/master'
- r = self.client.post('/github/', data=json.dumps(self.payload),
- content_type='application/json')
+ r = self.client.post(
+ '/github/', data=json.dumps(self.payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: github.com/rtfd/readthedocs.org [latest]')
rtd.default_branch = old_default
@@ -364,13 +397,13 @@ def test_github_webhook_is_deprecated(self):
r = self.client.post(
'/github/',
data=json.dumps(payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 403)
class CorePostCommitTest(BasePostCommitTest):
- fixtures = ["eric"]
+ fixtures = ['eric']
def setUp(self):
self._setup()
@@ -381,8 +414,10 @@ def test_core_commit_hook(self):
rtd.save()
r = self.client.post('/build/%s' % rtd.pk, {'version_slug': 'master'})
self.assertEqual(r.status_code, 302)
- self.assertEqual(r._headers['location'][1],
- '/projects/read-the-docs/builds/')
+ self.assertEqual(
+ r._headers['location'][1],
+ '/projects/read-the-docs/builds/',
+ )
def test_hook_state_tracking(self):
rtd = Project.objects.get(slug='read-the-docs')
@@ -404,123 +439,139 @@ def setUp(self):
self._setup()
self.hg_payload = {
- "canon_url": "https://bitbucket.org",
- "commits": [
+ 'canon_url': 'https://bitbucket.org',
+ 'commits': [
{
- "author": "marcus",
- "branch": "default",
- "files": [
+ 'author': 'marcus',
+ 'branch': 'default',
+ 'files': [
{
- "file": "somefile.py",
- "type": "modified"
- }
+ 'file': 'somefile.py',
+ 'type': 'modified',
+ },
],
- "message": "Added some feature things",
- "node": "d14d26a93fd2",
- "parents": [
- "1b458191f31a"
+ 'message': 'Added some feature things',
+ 'node': 'd14d26a93fd2',
+ 'parents': [
+ '1b458191f31a',
],
- "raw_author": "Marcus Bertrand ",
- "raw_node": "d14d26a93fd28d3166fa81c0cd3b6f339bb95bfe",
- "revision": 3,
- "size": -1,
- "timestamp": "2012-05-30 06:07:03",
- "utctimestamp": "2012-05-30 04:07:03+00:00"
- }
+ 'raw_author': 'Marcus Bertrand ',
+ 'raw_node': 'd14d26a93fd28d3166fa81c0cd3b6f339bb95bfe',
+ 'revision': 3,
+ 'size': -1,
+ 'timestamp': '2012-05-30 06:07:03',
+ 'utctimestamp': '2012-05-30 04:07:03+00:00',
+ },
],
- "repository": {
- "absolute_url": "/pip/pip/",
- "fork": False,
- "is_private": True,
- "name": "Project X",
- "owner": "marcus",
- "scm": "hg",
- "slug": "project-x",
- "website": ""
+ 'repository': {
+ 'absolute_url': '/pip/pip/',
+ 'fork': False,
+ 'is_private': True,
+ 'name': 'Project X',
+ 'owner': 'marcus',
+ 'scm': 'hg',
+ 'slug': 'project-x',
+ 'website': '',
},
- "user": "marcus"
+ 'user': 'marcus',
}
self.git_payload = {
- "canon_url": "https://bitbucket.org",
- "commits": [
+ 'canon_url': 'https://bitbucket.org',
+ 'commits': [
{
- "author": "marcus",
- "branch": "master",
- "files": [
+ 'author': 'marcus',
+ 'branch': 'master',
+ 'files': [
{
- "file": "somefile.py",
- "type": "modified"
- }
+ 'file': 'somefile.py',
+ 'type': 'modified',
+ },
],
- "message": "Added some more things to somefile.py\n",
- "node": "620ade18607a",
- "parents": [
- "702c70160afc"
+ 'message': 'Added some more things to somefile.py\n',
+ 'node': '620ade18607a',
+ 'parents': [
+ '702c70160afc',
],
- "raw_author": "Marcus Bertrand ",
- "raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9",
- "revision": None,
- "size": -1,
- "timestamp": "2012-05-30 05:58:56",
- "utctimestamp": "2012-05-30 03:58:56+00:00"
- }
+ 'raw_author': 'Marcus Bertrand ',
+ 'raw_node': '620ade18607ac42d872b568bb92acaa9a28620e9',
+ 'revision': None,
+ 'size': -1,
+ 'timestamp': '2012-05-30 05:58:56',
+ 'utctimestamp': '2012-05-30 03:58:56+00:00',
+ },
],
- "repository": {
- "absolute_url": "/sphinx/sphinx/",
- "fork": False,
- "is_private": True,
- "name": "Project X",
- "owner": "marcus",
- "scm": "git",
- "slug": "project-x",
- "website": "https://atlassian.com/"
+ 'repository': {
+ 'absolute_url': '/sphinx/sphinx/',
+ 'fork': False,
+ 'is_private': True,
+ 'name': 'Project X',
+ 'owner': 'marcus',
+ 'scm': 'git',
+ 'slug': 'project-x',
+ 'website': 'https://atlassian.com/',
},
- "user": "marcus"
+ 'user': 'marcus',
}
def test_post_types(self):
- """Ensure various POST formats"""
- r = self.client.post('/bitbucket/',
- data=json.dumps(self.hg_payload),
- content_type='application/json')
+ """Ensure various POST formats."""
+ r = self.client.post(
+ '/bitbucket/',
+ data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertEqual(r.status_code, 200)
- r = self.client.post('/bitbucket/',
- data=urlencode({'payload': json.dumps(self.hg_payload)}),
- content_type='application/x-www-form-urlencoded')
+ r = self.client.post(
+ '/bitbucket/',
+ data=urlencode({'payload': json.dumps(self.hg_payload)}),
+ content_type='application/x-www-form-urlencoded',
+ )
self.assertEqual(r.status_code, 200)
def test_bitbucket_post_commit(self):
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: bitbucket.org/pip/pip [latest]')
- r = self.client.post('/bitbucket/', data=json.dumps(self.git_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.git_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: bitbucket.org/sphinx/sphinx [latest]')
def test_bitbucket_post_commit_empty_commit_list(self):
self.hg_payload['commits'] = []
self.git_payload['commits'] = []
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, 'Commit/branch not found', status_code=404)
- r = self.client.post('/bitbucket/', data=json.dumps(self.git_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.git_payload),
+ content_type='application/json',
+ )
self.assertContains(r, 'Commit/branch not found', status_code=404)
def test_bitbucket_post_commit_non_existent_url(self):
self.hg_payload['repository']['absolute_url'] = '/invalid/repository'
self.git_payload['repository']['absolute_url'] = '/invalid/repository'
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, 'Project match not found', status_code=404)
- r = self.client.post('/bitbucket/', data=json.dumps(self.git_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.git_payload),
+ content_type='application/json',
+ )
self.assertContains(r, 'Project match not found', status_code=404)
@@ -530,22 +581,28 @@ def test_bitbucket_post_commit_hook_builds_branch_docs_if_it_should(self):
versions that are set to be built if the branch they refer to
is updated. Otherwise it is no op.
"""
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: bitbucket.org/pip/pip [latest]')
self.hg_payload['commits'] = [{
"branch": "not_ok",
}]
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Not Building: bitbucket.org/pip/pip [not_ok]')
self.hg_payload['commits'] = [{
"branch": "unknown",
}]
- r = self.client.post('/bitbucket/', data=json.dumps(self.hg_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.hg_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) No known branches were pushed to.')
def test_bitbucket_default_branch(self):
@@ -556,14 +613,16 @@ def test_bitbucket_default_branch(self):
self.feature.projects.add(self.test_project)
self.git_payload['commits'] = [{
- "branch": "integration",
+ 'branch': 'integration',
}]
self.git_payload['repository'] = {
- 'absolute_url': '/test/project/'
+ 'absolute_url': '/test/project/',
}
- r = self.client.post('/bitbucket/', data=json.dumps(self.git_payload),
- content_type='application/json')
+ r = self.client.post(
+ '/bitbucket/', data=json.dumps(self.git_payload),
+ content_type='application/json',
+ )
self.assertContains(r, '(URL Build) Build Started: bitbucket.org/test/project [latest]')
def test_bitbucket_request_empty_url(self):
@@ -574,7 +633,7 @@ def test_bitbucket_request_empty_url(self):
self.git_payload['repository']['absolute_url'] = ''
r = self.client.post(
'/bitbucket/', data=json.dumps(self.git_payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 400)
@@ -591,6 +650,6 @@ def test_bitbucket_webhook_is_deprecated(self):
r = self.client.post(
'/bitbucket/',
data=json.dumps(payload),
- content_type='application/json'
+ content_type='application/json',
)
self.assertEqual(r.status_code, 403)
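
Aside: the hunks above only reflow the webhook test calls. A minimal sketch of the wrapped-call style they apply, using a stand-in post() helper rather than Django's real test client (everything below is illustrative and not part of the patch):

# Editor's sketch (assumption: a plain stand-in, not django.test.Client).
import json


def post(path, data, content_type):
    """Stand-in that just records the call, to show the argument layout."""
    return {'path': path, 'data': data, 'content_type': content_type}


payload = {'ref': 'refs/heads/awesome'}

# One argument per line, closing parenthesis on its own line, trailing comma:
# adding another keyword later only touches a single line of a future diff.
r = post(
    '/github/',
    data=json.dumps(payload),
    content_type='application/json',
)
assert r['path'] == '/github/'
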
diff --git a/readthedocs/rtd_tests/tests/test_privacy.py b/readthedocs/rtd_tests/tests/test_privacy.py
index 8a7359c9055..2f55a0fd9e3 100644
--- a/readthedocs/rtd_tests/tests/test_privacy.py
+++ b/readthedocs/rtd_tests/tests/test_privacy.py
@@ -1,17 +1,18 @@
-from __future__ import absolute_import
-import logging
+# -*- coding: utf-8 -*-
import json
-import mock
+import logging
+import mock
+from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
-from django.contrib.auth.models import User
from readthedocs.builds.constants import LATEST
-from readthedocs.builds.models import Version, Build
-from readthedocs.projects.models import Project
-from readthedocs.projects.forms import UpdateProjectForm
+from readthedocs.builds.models import Build, Version
from readthedocs.projects import tasks
+from readthedocs.projects.forms import UpdateProjectForm
+from readthedocs.projects.models import Project
+
log = logging.getLogger(__name__)
@@ -29,27 +30,32 @@ def setUp(self):
tasks.update_docs_task.delay = mock.Mock()
- def _create_kong(self, privacy_level='private',
- version_privacy_level='private'):
+ def _create_kong(
+ self, privacy_level='private',
+ version_privacy_level='private',
+ ):
self.client.login(username='eric', password='test')
log.info(
- "Making kong with privacy: %s and version privacy: %s",
+ 'Making kong with privacy: %s and version privacy: %s',
privacy_level,
version_privacy_level,
)
# Create project via project form, simulate import wizard without magic
form = UpdateProjectForm(
- data={'repo_type': 'git',
- 'repo': 'https://github.com/ericholscher/django-kong',
- 'name': 'Django Kong',
- 'language': 'en',
- 'default_branch': '',
- 'project_url': 'http://django-kong.rtfd.org',
- 'default_version': LATEST,
- 'python_interpreter': 'python',
- 'description': 'OOHHH AH AH AH KONG SMASH',
- 'documentation_type': 'sphinx'},
- user=User.objects.get(username='eric'))
+ data={
+ 'repo_type': 'git',
+ 'repo': 'https://github.com/ericholscher/django-kong',
+ 'name': 'Django Kong',
+ 'language': 'en',
+ 'default_branch': '',
+ 'project_url': 'http://django-kong.rtfd.org',
+ 'default_version': LATEST,
+ 'python_interpreter': 'python',
+ 'description': 'OOHHH AH AH AH KONG SMASH',
+ 'documentation_type': 'sphinx',
+ },
+ user=User.objects.get(username='eric'),
+ )
proj = form.save()
# Update these directly, no form has all the fields we need
proj.privacy_level = privacy_level
@@ -129,8 +135,10 @@ def test_private_branch(self):
kong = self._create_kong('public', 'private')
self.client.login(username='eric', password='test')
- Version.objects.create(project=kong, identifier='test id',
- verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True)
+ Version.objects.create(
+ project=kong, identifier='test id',
+ verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True,
+ )
self.assertEqual(Version.objects.count(), 2)
self.assertEqual(Version.objects.get(slug='test-slug').privacy_level, 'private')
r = self.client.get('/projects/django-kong/')
@@ -149,9 +157,11 @@ def test_public_branch(self):
kong = self._create_kong('public', 'public')
self.client.login(username='eric', password='test')
- Version.objects.create(project=kong, identifier='test id',
- verbose_name='test verbose', slug='test-slug',
- active=True, built=True)
+ Version.objects.create(
+ project=kong, identifier='test id',
+ verbose_name='test verbose', slug='test-slug',
+ active=True, built=True,
+ )
self.assertEqual(Version.objects.count(), 2)
self.assertEqual(Version.objects.all()[0].privacy_level, 'public')
r = self.client.get('/projects/django-kong/')
@@ -165,22 +175,30 @@ def test_public_branch(self):
def test_public_repo_api(self):
self._create_kong('public', 'public')
self.client.login(username='eric', password='test')
- resp = self.client.get("http://testserver/api/v1/project/django-kong/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/django-kong/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
- resp = self.client.get("http://testserver/api/v1/project/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['meta']['total_count'], 1)
self.client.login(username='tester', password='test')
- resp = self.client.get("http://testserver/api/v1/project/django-kong/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/django-kong/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
- resp = self.client.get("http://testserver/api/v1/project/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['meta']['total_count'], 1)
@@ -188,21 +206,29 @@ def test_public_repo_api(self):
def test_private_repo_api(self):
self._create_kong('private', 'private')
self.client.login(username='eric', password='test')
- resp = self.client.get("http://testserver/api/v1/project/django-kong/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/django-kong/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
- resp = self.client.get("http://testserver/api/v1/project/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['meta']['total_count'], 1)
self.client.login(username='tester', password='test')
- resp = self.client.get("http://testserver/api/v1/project/django-kong/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/django-kong/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 404)
- resp = self.client.get("http://testserver/api/v1/project/",
- data={"format": "json"})
+ resp = self.client.get(
+ 'http://testserver/api/v1/project/',
+ data={'format': 'json'},
+ )
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['meta']['total_count'], 0)
@@ -211,11 +237,17 @@ def test_private_doc_serving(self):
kong = self._create_kong('public', 'private')
self.client.login(username='eric', password='test')
- Version.objects.create(project=kong, identifier='test id',
- verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True)
- self.client.post('/dashboard/django-kong/versions/',
- {'version-test-slug': 'on',
- 'privacy-test-slug': 'private'})
+ Version.objects.create(
+ project=kong, identifier='test id',
+ verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True,
+ )
+ self.client.post(
+ '/dashboard/django-kong/versions/',
+ {
+ 'version-test-slug': 'on',
+ 'privacy-test-slug': 'private',
+ },
+ )
r = self.client.get('/docs/django-kong/en/test-slug/')
self.client.login(username='eric', password='test')
self.assertEqual(r.status_code, 404)
@@ -349,8 +381,10 @@ def test_build_filtering(self):
kong = self._create_kong('public', 'private')
self.client.login(username='eric', password='test')
- ver = Version.objects.create(project=kong, identifier='test id',
- verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True)
+ ver = Version.objects.create(
+ project=kong, identifier='test id',
+ verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True,
+ )
r = self.client.get('/projects/django-kong/builds/')
self.assertContains(r, 'test-slug')
@@ -365,11 +399,9 @@ def test_build_filtering(self):
self.assertNotContains(r, 'test-slug')
def test_queryset_chaining(self):
- """
- Test that manager methods get set on related querysets.
- """
+ """Test that manager methods get set on related querysets."""
kong = self._create_kong('public', 'private')
self.assertEqual(
kong.versions.private().get(slug='latest').slug,
- 'latest'
+ 'latest',
)
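
Aside: besides the quote and trailing-comma changes, the test_queryset_chaining hunk collapses a one-sentence docstring onto a single line. A small sketch of that convention (the function and data below are made up, not part of the patch):

# Editor's sketch of the single-line docstring style the hunk applies.
def private_slugs(versions):
    """Return the slugs of versions whose privacy level is ``private``."""
    return [v['slug'] for v in versions if v['privacy_level'] == 'private']


versions = [
    {'slug': 'latest', 'privacy_level': 'private'},
    {'slug': 'stable', 'privacy_level': 'public'},
]
assert private_slugs(versions) == ['latest']
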
diff --git a/readthedocs/rtd_tests/tests/test_privacy_urls.py b/readthedocs/rtd_tests/tests/test_privacy_urls.py
index 937574fbafe..9bffbb82558 100644
--- a/readthedocs/rtd_tests/tests/test_privacy_urls.py
+++ b/readthedocs/rtd_tests/tests/test_privacy_urls.py
@@ -1,25 +1,23 @@
-from __future__ import absolute_import
-from __future__ import print_function
+# -*- coding: utf-8 -*-
import re
+import mock
from allauth.socialaccount.models import SocialAccount
-from builtins import object
from django.contrib.admindocs.views import extract_views_from_urlpatterns
from django.test import TestCase
from django.urls import reverse
from django_dynamic_fixture import get
-import mock
from taggit.models import Tag
from readthedocs.builds.models import Build, BuildCommandResult
from readthedocs.core.utils.tasks import TaskNoPermission
from readthedocs.integrations.models import HttpExchange, Integration
-from readthedocs.projects.models import Project, Domain, EnvironmentVariable
-from readthedocs.oauth.models import RemoteRepository, RemoteOrganization
+from readthedocs.oauth.models import RemoteOrganization, RemoteRepository
+from readthedocs.projects.models import Domain, EnvironmentVariable, Project
from readthedocs.rtd_tests.utils import create_user
-class URLAccessMixin(object):
+class URLAccessMixin:
default_kwargs = {}
response_data = {}
@@ -73,8 +71,10 @@ def assertResponse(self, path, name=None, method=None, data=None, **kwargs):
val,
('Attribute mismatch for view {view} ({path}): '
'{key} != {expected} (got {value})'
- .format(view=name, path=path, key=key, expected=val,
- value=resp_val))
+ .format(
+ view=name, path=path, key=key, expected=val,
+ value=resp_val,
+ )),
)
return response
@@ -93,10 +93,10 @@ def setUp(self):
for not_obj in self.context_data:
if isinstance(obj, list) or isinstance(obj, set) or isinstance(obj, tuple):
self.assertNotIn(not_obj, obj)
- print("%s not in %s" % (not_obj, obj))
+ print('{} not in {}'.format(not_obj, obj))
else:
self.assertNotEqual(not_obj, obj)
- print("%s is not %s" % (not_obj, obj))
+ print('{} is not {}'.format(not_obj, obj))
def _test_url(self, urlpatterns):
deconstructed_urls = extract_views_from_urlpatterns(urlpatterns)
@@ -106,7 +106,8 @@ def _test_url(self, urlpatterns):
url_ctx = self.get_url_path_ctx()
if url_ctx:
self.response_data = {
- url.format(**url_ctx): data for url, data in self.response_data.items()}
+ url.format(**url_ctx): data for url, data in self.response_data.items()
+ }
for (view, regex, namespace, name) in deconstructed_urls:
request_data = self.request_data.get(name, {}).copy()
@@ -125,20 +126,26 @@ def setUp(self):
# Previous Fixtures
self.owner = create_user(username='owner', password='test')
self.tester = create_user(username='tester', password='test')
- self.pip = get(Project, slug='pip', users=[self.owner],
- privacy_level='public', main_language_project=None)
- self.private = get(Project, slug='private', privacy_level='private',
- main_language_project=None)
+ self.pip = get(
+ Project, slug='pip', users=[self.owner],
+ privacy_level='public', main_language_project=None,
+ )
+ self.private = get(
+ Project, slug='private', privacy_level='private',
+ main_language_project=None,
+ )
class ProjectMixin(URLAccessMixin):
def setUp(self):
- super(ProjectMixin, self).setUp()
+ super().setUp()
self.build = get(Build, project=self.pip)
self.tag = get(Tag, slug='coolness')
- self.subproject = get(Project, slug='sub', language='ja',
- users=[self.owner], main_language_project=None)
+ self.subproject = get(
+ Project, slug='sub', language='ja',
+ users=[self.owner], main_language_project=None,
+ )
self.pip.add_subproject(self.subproject)
self.pip.translations.add(self.subproject)
self.integration = get(Integration, project=self.pip, provider_data='')
@@ -313,7 +320,7 @@ def is_admin(self):
class APIMixin(URLAccessMixin):
def setUp(self):
- super(APIMixin, self).setUp()
+ super().setUp()
self.build = get(Build, project=self.pip)
self.build_command_result = get(BuildCommandResult, project=self.pip)
self.domain = get(Domain, url='http://docs.foobar.com', project=self.pip)
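
Aside: this file's hunks switch to Python-3-only idioms — implicit object inheritance, zero-argument super(), and str.format over %-interpolation. A minimal, self-contained sketch of those three idioms (the classes below are stand-ins, not the RTD mixins):

# Editor's sketch; unrelated to the real URLAccessMixin/ProjectMixin classes.
class Base:               # Python 3: no explicit ``(object)`` needed
    def setUp(self):
        self.calls = ['base']


class Child(Base):
    def setUp(self):
        super().setUp()   # zero-argument super(), as in the hunks above
        self.calls.append('child')


c = Child()
c.setUp()
print('{} setUp calls recorded'.format(len(c.calls)))  # str.format over %-style
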
diff --git a/readthedocs/rtd_tests/tests/test_profile_views.py b/readthedocs/rtd_tests/tests/test_profile_views.py
index 9e3a75a449e..de7c244bf1b 100644
--- a/readthedocs/rtd_tests/tests/test_profile_views.py
+++ b/readthedocs/rtd_tests/tests/test_profile_views.py
@@ -1,8 +1,7 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
-from django.urls import reverse
from django.test import TestCase
+from django.urls import reverse
from django_dynamic_fixture import get
@@ -25,7 +24,7 @@ def test_edit_profile(self):
'first_name': 'Read',
'last_name': 'Docs',
'homepage': 'readthedocs.org',
- }
+ },
)
self.assertTrue(resp.status_code, 200)
@@ -47,7 +46,7 @@ def test_edit_profile_with_invalid_values(self):
'first_name': 'a' * 31,
'last_name': 'b' * 31,
'homepage': 'c' * 101,
- }
+ },
)
FORM_ERROR_FORMAT = 'Ensure this value has at most {} characters (it has {}).'
@@ -58,20 +57,20 @@ def test_edit_profile_with_invalid_values(self):
def test_delete_account(self):
resp = self.client.get(
- reverse('delete_account')
+ reverse('delete_account'),
)
self.assertEqual(resp.status_code, 200)
resp = self.client.post(
reverse('delete_account'),
data={
'username': self.user.username,
- }
+ },
)
self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['Location'], reverse('homepage'))
self.assertFalse(
- User.objects.filter(username=self.user.username).exists()
+ User.objects.filter(username=self.user.username).exists(),
)
def test_profile_detail(self):
@@ -95,7 +94,7 @@ def test_profile_detail_not_found(self):
def test_account_advertising(self):
resp = self.client.get(
- reverse('account_advertising')
+ reverse('account_advertising'),
)
self.assertEqual(resp.status_code, 200)
self.assertTrue(self.user.profile.allow_ads)
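
Aside: the untouched context line ``self.assertTrue(resp.status_code, 200)`` above passes for any truthy status code, because unittest treats the second positional argument as the failure message. A hedged demonstration of the difference (not part of the patch):

# Editor's aside: unittest semantics only, no Django involved.
import unittest


class StatusCodeExample(unittest.TestCase):
    def test_loose_vs_strict(self):
        status_code = 302
        # Passes even for a redirect: 302 is truthy and 200 is just the msg.
        self.assertTrue(status_code, 200)
        # The strict form fails for anything other than 200.
        with self.assertRaises(AssertionError):
            self.assertEqual(status_code, 200)


if __name__ == '__main__':
    unittest.main()
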
diff --git a/readthedocs/rtd_tests/tests/test_project.py b/readthedocs/rtd_tests/tests/test_project.py
index 679b761a25a..4a56bb9395e 100644
--- a/readthedocs/rtd_tests/tests/test_project.py
+++ b/readthedocs/rtd_tests/tests/test_project.py
@@ -1,20 +1,21 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import datetime
import json
from django.contrib.auth.models import User
from django.forms.models import model_to_dict
from django.test import TestCase
-from django_dynamic_fixture import get
from django.utils import timezone
+from django_dynamic_fixture import get
from mock import patch
from rest_framework.reverse import reverse
from readthedocs.builds.constants import (
- BUILD_STATE_CLONING, BUILD_STATE_FINISHED, BUILD_STATE_TRIGGERED, LATEST)
+ BUILD_STATE_CLONING,
+ BUILD_STATE_FINISHED,
+ BUILD_STATE_TRIGGERED,
+ LATEST,
+)
from readthedocs.builds.models import Build
from readthedocs.projects.exceptions import ProjectConfigurationError
from readthedocs.projects.models import Project
@@ -22,7 +23,7 @@
from readthedocs.rtd_tests.mocks.paths import fake_paths_by_regex
-class ProjectMixin(object):
+class ProjectMixin:
fixtures = ['eric', 'test_data']
@@ -103,7 +104,8 @@ def test_conf_file_not_found(self, find_method, full_find_method):
full_find_method.return_value = []
with self.assertRaisesMessage(
ProjectConfigurationError,
- ProjectConfigurationError.NOT_FOUND) as cm:
+ ProjectConfigurationError.NOT_FOUND,
+ ) as cm:
self.pip.conf_file()
@patch('readthedocs.projects.models.Project.find')
@@ -115,7 +117,8 @@ def test_multiple_conf_files(self, find_method):
]
with self.assertRaisesMessage(
ProjectConfigurationError,
- ProjectConfigurationError.MULTIPLE_CONF_FILES) as cm:
+ ProjectConfigurationError.MULTIPLE_CONF_FILES,
+ ) as cm:
self.pip.conf_file()
@@ -162,23 +165,24 @@ def test_translation_delete(self):
self.assertFalse(Project.objects.filter(pk=project_delete.pk).exists())
self.assertTrue(Project.objects.filter(pk=project_keep.pk).exists())
self.assertIsNone(
- Project.objects.get(pk=project_keep.pk).main_language_project)
+ Project.objects.get(pk=project_keep.pk).main_language_project,
+ )
def test_user_can_add_own_project_as_translation(self):
user_a = User.objects.get(username='eric')
project_a = get(
Project, users=[user_a],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
project_b = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
self.client.login(username=user_a.username, password='test')
self.client.post(
reverse('projects_translations', args=[project_a.slug]),
- data={'project': project_b.slug}
+ data={'project': project_b.slug},
)
self.assertEqual(project_a.translations.first(), project_b)
@@ -190,20 +194,20 @@ def test_user_can_add_project_as_translation_if_is_owner(self):
user_a = User.objects.get(username='eric')
project_a = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
user_b = User.objects.get(username='tester')
# User A and B are owners of project B
project_b = get(
Project, users=[user_b, user_a],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
self.client.login(username=user_a.username, password='test')
self.client.post(
reverse('projects_translations', args=[project_a.slug]),
- data={'project': project_b.slug}
+ data={'project': project_b.slug},
)
self.assertEqual(project_a.translations.first(), project_b)
@@ -213,20 +217,20 @@ def test_user_can_not_add_other_user_project_as_translation(self):
user_a = User.objects.get(username='eric')
project_a = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
user_b = User.objects.get(username='tester')
project_b = get(
Project, users=[user_b],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
# User A try to add project B as translation of project A
self.client.login(username=user_a.username, password='test')
resp = self.client.post(
reverse('projects_translations', args=[project_a.slug]),
- data={'project': project_b.slug}
+ data={'project': project_b.slug},
)
self.assertContains(resp, 'Select a valid choice')
@@ -242,13 +246,13 @@ def test_previous_users_can_list_and_delete_translations_not_owner(self):
user_a = User.objects.get(username='eric')
project_a = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
user_b = User.objects.get(username='tester')
project_b = get(
Project, users=[user_b],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
project_a.translations.add(project_b)
@@ -258,16 +262,16 @@ def test_previous_users_can_list_and_delete_translations_not_owner(self):
# Project B is listed under user A translations
resp = self.client.get(
- reverse('projects_translations', args=[project_a.slug])
+ reverse('projects_translations', args=[project_a.slug]),
)
self.assertContains(resp, project_b.slug)
resp = self.client.post(
reverse(
'projects_translations_delete',
- args=[project_a.slug, project_b.slug]
+ args=[project_a.slug, project_b.slug],
),
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 200)
self.assertNotIn(project_b, project_a.translations.all())
@@ -276,11 +280,11 @@ def test_user_cant_delete_other_user_translations(self):
user_a = User.objects.get(username='eric')
project_a = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
project_b = get(
Project, users=[user_a],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
project_a.translations.add(project_b)
@@ -289,11 +293,11 @@ def test_user_cant_delete_other_user_translations(self):
user_b = User.objects.get(username='tester')
project_c = get(
Project, users=[user_b],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
project_d = get(
Project, users=[user_b, user_a],
- language='en', main_language_project=None
+ language='en', main_language_project=None,
)
project_d.translations.add(project_c)
project_d.save()
@@ -304,9 +308,9 @@ def test_user_cant_delete_other_user_translations(self):
resp = self.client.post(
reverse(
'projects_translations_delete',
- args=[project_a.slug, project_b.slug]
+ args=[project_a.slug, project_b.slug],
),
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 404)
self.assertIn(project_b, project_a.translations.all())
@@ -318,9 +322,9 @@ def test_user_cant_delete_other_user_translations(self):
resp = self.client.post(
reverse(
'projects_translations_delete',
- args=[project_d.slug, project_b.slug]
+ args=[project_d.slug, project_b.slug],
),
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 404)
self.assertIn(project_b, project_a.translations.all())
@@ -332,9 +336,9 @@ def test_user_cant_delete_other_user_translations(self):
resp = self.client.post(
reverse(
'projects_translations_delete',
- args=[project_b.slug, project_b.slug]
+ args=[project_b.slug, project_b.slug],
),
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 404)
self.assertIn(project_b, project_a.translations.all())
@@ -344,7 +348,7 @@ def test_user_cant_change_lang_to_translation_lang(self):
project_a = Project.objects.get(slug='read-the-docs')
project_b = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
project_a.translations.add(project_b)
@@ -361,16 +365,16 @@ def test_user_cant_change_lang_to_translation_lang(self):
resp = self.client.post(
reverse(
'projects_edit',
- args=[project_a.slug]
+ args=[project_a.slug],
),
data=data,
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 200)
self.assertContains(
resp,
'There is already a "es" translation '
- 'for the read-the-docs project'
+ 'for the read-the-docs project',
)
def test_user_can_change_project_with_same_lang(self):
@@ -378,7 +382,7 @@ def test_user_can_change_project_with_same_lang(self):
project_a = Project.objects.get(slug='read-the-docs')
project_b = get(
Project, users=[user_a],
- language='es', main_language_project=None
+ language='es', main_language_project=None,
)
project_a.translations.add(project_b)
@@ -395,10 +399,10 @@ def test_user_can_change_project_with_same_lang(self):
resp = self.client.post(
reverse(
'projects_edit',
- args=[project_a.slug]
+ args=[project_a.slug],
),
data=data,
- follow=True
+ follow=True,
)
self.assertEqual(resp.status_code, 200)
self.assertNotContains(resp, 'There is already a')
@@ -429,7 +433,8 @@ def setUp(self):
state=BUILD_STATE_TRIGGERED,
)
self.build_2.date = (
- timezone.now() - datetime.timedelta(hours=1))
+ timezone.now() - datetime.timedelta(hours=1)
+ )
self.build_2.save()
# Build started an hour ago with custom time (2 hours)
@@ -439,7 +444,8 @@ def setUp(self):
state=BUILD_STATE_TRIGGERED,
)
self.build_3.date = (
- timezone.now() - datetime.timedelta(hours=1))
+ timezone.now() - datetime.timedelta(hours=1)
+ )
self.build_3.save()
def test_finish_inactive_builds_task(self):
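
Aside: the reflowed setUp hunks above compute a build date one hour in the past. A self-contained sketch of that computation using the standard library instead of django.utils.timezone (illustrative only):

# Editor's sketch of the "build started an hour ago" setup.
import datetime

now = datetime.datetime.now(datetime.timezone.utc)
an_hour_ago = now - datetime.timedelta(hours=1)
assert now - an_hour_ago == datetime.timedelta(hours=1)
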
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py
index d5604cb97b8..cd927eed564 100644
--- a/readthedocs/rtd_tests/tests/test_project_forms.py
+++ b/readthedocs/rtd_tests/tests/test_project_forms.py
@@ -1,12 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import mock
from django.contrib.auth.models import User
from django.test import TestCase
@@ -32,7 +25,7 @@
TranslationForm,
UpdateProjectForm,
)
-from readthedocs.projects.models import Project, EnvironmentVariable
+from readthedocs.projects.models import EnvironmentVariable, Project
class TestProjectForms(TestCase):
@@ -167,7 +160,7 @@ def test_changing_vcs_should_not_change_latest_is_not_none(self):
def test_length_of_tags(self):
data = {
'documentation_type': 'sphinx',
- 'language': 'en'
+ 'language': 'en',
}
data['tags'] = '{},{}'.format('a'*50, 'b'*99)
form = ProjectExtraForm(data)
@@ -176,7 +169,7 @@ def test_length_of_tags(self):
data['tags'] = '{},{}'.format('a'*90, 'b'*100)
form = ProjectExtraForm(data)
self.assertTrue(form.is_valid())
-
+
data['tags'] = '{},{}'.format('a'*99, 'b'*101)
form = ProjectExtraForm(data)
self.assertFalse(form.is_valid())
@@ -219,7 +212,7 @@ def setUp(self):
slug='public-4',
active=False,
privacy_level=PUBLIC,
- identifier='public/4'
+ identifier='public/4',
)
get(
Version,
@@ -243,10 +236,10 @@ def test_list_only_active_versions_on_default_version(self):
# This version is created automatically by the project on save
self.assertTrue(self.project.versions.filter(slug=LATEST).exists())
self.assertEqual(
- set(
+ {
slug
for slug, _ in form.fields['default_version'].widget.choices
- ),
+ },
{'latest', 'public-1', 'public-2', 'private', 'protected'},
)
@@ -255,13 +248,13 @@ def test_list_all_versions_on_default_branch(self):
# This version is created automatically by the project on save
self.assertTrue(self.project.versions.filter(slug=LATEST).exists())
self.assertEqual(
- set(
+ {
identifier
for identifier, _ in form.fields['default_branch'].widget.choices
- ),
+ },
{
None, 'master', 'public-1', 'public-2',
- 'public-3', 'public/4', 'protected', 'private'
+ 'public-3', 'public/4', 'protected', 'private',
},
)
@@ -282,7 +275,7 @@ def setUp(self):
self.project_s_fr = self.get_project(
lang='fr',
- users=[self.user_b, self.user_a]
+ users=[self.user_b, self.user_a],
)
def get_project(self, lang, users, **kwargs):
@@ -307,7 +300,7 @@ def test_list_only_owner_projects(self):
]
self.assertEqual(
{proj_slug for proj_slug, _ in form.fields['project'].choices},
- {project.slug for project in expected_projects}
+ {project.slug for project in expected_projects},
)
form = TranslationForm(
@@ -322,7 +315,7 @@ def test_list_only_owner_projects(self):
]
self.assertEqual(
{proj_slug for proj_slug, _ in form.fields['project'].choices},
- {project.slug for project in expected_projects}
+ {project.slug for project in expected_projects},
)
def test_excludes_existing_translations(self):
@@ -343,7 +336,7 @@ def test_excludes_existing_translations(self):
]
self.assertEqual(
{proj_slug for proj_slug, _ in form.fields['project'].choices},
- {project.slug for project in expected_projects}
+ {project.slug for project in expected_projects},
)
def test_user_cant_add_other_user_project(self):
@@ -355,11 +348,11 @@ def test_user_cant_add_other_user_project(self):
self.assertFalse(form.is_valid())
self.assertIn(
'Select a valid choice',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
self.assertNotIn(
self.project_f_ar,
- [proj_slug for proj_slug, _ in form.fields['project'].choices]
+ [proj_slug for proj_slug, _ in form.fields['project'].choices],
)
def test_user_cant_add_project_with_same_lang(self):
@@ -371,7 +364,7 @@ def test_user_cant_add_project_with_same_lang(self):
self.assertFalse(form.is_valid())
self.assertIn(
'Both projects can not have the same language (English).',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
def test_user_cant_add_project_with_same_lang_of_other_translation(self):
@@ -386,7 +379,7 @@ def test_user_cant_add_project_with_same_lang_of_other_translation(self):
self.assertFalse(form.is_valid())
self.assertIn(
'This project already has a translation for English.',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
def test_no_nesting_translation(self):
@@ -401,7 +394,7 @@ def test_no_nesting_translation(self):
self.assertFalse(form.is_valid())
self.assertIn(
'Select a valid choice',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
def test_no_nesting_translation_case_2(self):
@@ -416,7 +409,7 @@ def test_no_nesting_translation_case_2(self):
self.assertFalse(form.is_valid())
self.assertIn(
'A project with existing translations can not',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
def test_not_already_translation(self):
@@ -431,7 +424,7 @@ def test_not_already_translation(self):
self.assertFalse(form.is_valid())
self.assertIn(
'is already a translation',
- ''.join(form.errors['project'])
+ ''.join(form.errors['project']),
)
def test_cant_change_language_to_translation_lang(self):
@@ -445,12 +438,12 @@ def test_cant_change_language_to_translation_lang(self):
'documentation_type': 'sphinx',
'language': 'en',
},
- instance=self.project_a_es
+ instance=self.project_a_es,
)
self.assertFalse(form.is_valid())
self.assertIn(
'There is already a "en" translation',
- ''.join(form.errors['language'])
+ ''.join(form.errors['language']),
)
# Translation tries to change lang
@@ -459,12 +452,12 @@ def test_cant_change_language_to_translation_lang(self):
'documentation_type': 'sphinx',
'language': 'es',
},
- instance=self.project_b_en
+ instance=self.project_b_en,
)
self.assertFalse(form.is_valid())
self.assertIn(
'There is already a "es" translation',
- ''.join(form.errors['language'])
+ ''.join(form.errors['language']),
)
# Translation tries to change lang
@@ -474,12 +467,12 @@ def test_cant_change_language_to_translation_lang(self):
'documentation_type': 'sphinx',
'language': 'br',
},
- instance=self.project_b_en
+ instance=self.project_b_en,
)
self.assertFalse(form.is_valid())
self.assertIn(
'There is already a "br" translation',
- ''.join(form.errors['language'])
+ ''.join(form.errors['language']),
)
def test_can_change_language_to_self_lang(self):
@@ -496,7 +489,7 @@ def test_can_change_language_to_self_lang(self):
'documentation_type': 'sphinx',
'language': 'es',
},
- instance=self.project_a_es
+ instance=self.project_a_es,
)
self.assertTrue(form.is_valid())
@@ -509,7 +502,7 @@ def test_can_change_language_to_self_lang(self):
'documentation_type': 'sphinx',
'language': 'en',
},
- instance=self.project_b_en
+ instance=self.project_b_en,
)
self.assertTrue(form.is_valid())
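
Aside: several hunks in this file replace set(generator) calls with set comprehensions when collecting form widget choices. Both spellings below build the same set; the choice data is made up for illustration:

# Editor's sketch of the set-comprehension rewrite these hunks apply.
choices = [('latest', 'latest'), ('public-1', 'public-1'), ('latest', 'alias')]

via_set_call = set(slug for slug, _ in choices)
via_comprehension = {slug for slug, _ in choices}

assert via_set_call == via_comprehension == {'latest', 'public-1'}
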
diff --git a/readthedocs/rtd_tests/tests/test_project_querysets.py b/readthedocs/rtd_tests/tests/test_project_querysets.py
index b8d5d9e1f57..01f02e986a3 100644
--- a/readthedocs/rtd_tests/tests/test_project_querysets.py
+++ b/readthedocs/rtd_tests/tests/test_project_querysets.py
@@ -1,13 +1,15 @@
# -*- coding: utf-8 -*-
-from django.contrib.auth.models import User
from datetime import timedelta
import django_dynamic_fixture as fixture
+from django.contrib.auth.models import User
from django.test import TestCase
-from readthedocs.projects.models import Project, Feature
-from readthedocs.projects.querysets import (ParentRelatedProjectQuerySet,
- ChildRelatedProjectQuerySet)
+from readthedocs.projects.models import Feature, Project
+from readthedocs.projects.querysets import (
+ ChildRelatedProjectQuerySet,
+ ParentRelatedProjectQuerySet,
+)
class ProjectQuerySetTests(TestCase):
@@ -22,12 +24,12 @@ def test_subproject_queryset_as_manager_gets_correct_class(self):
mgr = ChildRelatedProjectQuerySet.as_manager()
self.assertEqual(
mgr.__class__.__name__,
- 'ManagerFromChildRelatedProjectQuerySetBase'
+ 'ManagerFromChildRelatedProjectQuerySetBase',
)
mgr = ParentRelatedProjectQuerySet.as_manager()
self.assertEqual(
mgr.__class__.__name__,
- 'ManagerFromParentRelatedProjectQuerySetBase'
+ 'ManagerFromParentRelatedProjectQuerySetBase',
)
def test_is_active(self):
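
Aside: the import hunk above switches to the parenthesized, one-name-per-line layout with a trailing comma. The same layout shown on stdlib names (illustrative; the real hunk imports the RTD queryset classes):

# Editor's sketch of the parenthesized import layout the patch standardises on.
from datetime import (
    date,
    timedelta,
)

assert date.today() - timedelta(days=1) < date.today()
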
diff --git a/readthedocs/rtd_tests/tests/test_project_symlinks.py b/readthedocs/rtd_tests/tests/test_project_symlinks.py
index 307e4e4516f..b3075167eb4 100644
--- a/readthedocs/rtd_tests/tests/test_project_symlinks.py
+++ b/readthedocs/rtd_tests/tests/test_project_symlinks.py
@@ -1,25 +1,28 @@
# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from builtins import object
import os
import shutil
import tempfile
import mock
from django.conf import settings
-from django.urls import reverse
from django.test import TestCase, override_settings
+from django.urls import reverse
from django_dynamic_fixture import get
from readthedocs.builds.models import Version
-from readthedocs.projects.models import Project, Domain
-from readthedocs.projects.tasks import broadcast_remove_orphan_symlinks, remove_orphan_symlinks, symlink_project
-from readthedocs.core.symlink import PublicSymlink, PrivateSymlink
+from readthedocs.core.symlink import PrivateSymlink, PublicSymlink
+from readthedocs.projects.models import Domain, Project
+from readthedocs.projects.tasks import (
+ broadcast_remove_orphan_symlinks,
+ remove_orphan_symlinks,
+ symlink_project,
+)
def get_filesystem(path, top_level_path=None):
- """Recurse into path, return dictionary mapping of path and files
+ """
+ Recurse into path, return dictionary mapping of path and files.
This will return the path `path` as a nested dictionary of path objects.
Directories are mapped to dictionary objects, file objects will have a
@@ -39,8 +42,10 @@ def get_filesystem(path, top_level_path=None):
if os.path.islink(full_path):
fs[child] = {
'type': 'link',
- 'target': os.path.relpath(os.path.realpath(full_path),
- top_level_path)
+ 'target': os.path.relpath(
+ os.path.realpath(full_path),
+ top_level_path,
+ ),
}
elif os.path.isfile(full_path):
fs[child] = {
@@ -74,47 +79,47 @@ def setUp(self):
self.mocks = {
'PublicSymlinkBase.CNAME_ROOT': mock.patch(
'readthedocs.core.symlink.PublicSymlinkBase.CNAME_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
'PublicSymlinkBase.WEB_ROOT': mock.patch(
'readthedocs.core.symlink.PublicSymlinkBase.WEB_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
'PublicSymlinkBase.PROJECT_CNAME_ROOT': mock.patch(
'readthedocs.core.symlink.PublicSymlinkBase.PROJECT_CNAME_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
'PrivateSymlinkBase.CNAME_ROOT': mock.patch(
'readthedocs.core.symlink.PrivateSymlinkBase.CNAME_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
'PrivateSymlinkBase.WEB_ROOT': mock.patch(
'readthedocs.core.symlink.PrivateSymlinkBase.WEB_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
'PrivateSymlinkBase.PROJECT_CNAME_ROOT': mock.patch(
'readthedocs.core.symlink.PrivateSymlinkBase.PROJECT_CNAME_ROOT',
- new_callable=mock.PropertyMock
+ new_callable=mock.PropertyMock,
),
}
- self.patches = dict((key, mock.start()) for (key, mock) in list(self.mocks.items()))
+ self.patches = {key: mock.start() for (key, mock) in list(self.mocks.items())}
self.patches['PublicSymlinkBase.CNAME_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'public_cname_root'
+ settings.SITE_ROOT, 'public_cname_root',
)
self.patches['PublicSymlinkBase.WEB_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'public_web_root'
+ settings.SITE_ROOT, 'public_web_root',
)
self.patches['PublicSymlinkBase.PROJECT_CNAME_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'public_cname_project'
+ settings.SITE_ROOT, 'public_cname_project',
)
self.patches['PrivateSymlinkBase.CNAME_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'private_cname_root'
+ settings.SITE_ROOT, 'private_cname_root',
)
self.patches['PrivateSymlinkBase.WEB_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'private_web_root'
+ settings.SITE_ROOT, 'private_web_root',
)
self.patches['PrivateSymlinkBase.PROJECT_CNAME_ROOT'].return_value = os.path.join(
- settings.SITE_ROOT, 'private_cname_project'
+ settings.SITE_ROOT, 'private_cname_project',
)
def tearDown(self):
@@ -127,30 +132,34 @@ def assertFilesystem(self, filesystem):
self.assertEqual(filesystem, get_filesystem(settings.SITE_ROOT))
-class BaseSymlinkCnames(object):
+class BaseSymlinkCnames:
def setUp(self):
- super(BaseSymlinkCnames, self).setUp()
- self.project = get(Project, slug='kong', privacy_level=self.privacy,
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.project.save()
self.symlink = self.symlink_class(self.project)
def test_symlink_cname(self):
- self.domain = get(Domain, project=self.project, domain='woot.com',
- url='http://woot.com', cname=True)
+ self.domain = get(
+ Domain, project=self.project, domain='woot.com',
+ url='http://woot.com', cname=True,
+ )
self.symlink.symlink_cnames()
filesystem = {
'private_cname_project': {
- 'woot.com': {'type': 'link', 'target': 'user_builds/kong'}
+ 'woot.com': {'type': 'link', 'target': 'user_builds/kong'},
},
'private_cname_root': {
'woot.com': {'type': 'link', 'target': 'private_web_root/kong'},
},
'private_web_root': {'kong': {'en': {}}},
'public_cname_project': {
- 'woot.com': {'type': 'link', 'target': 'user_builds/kong'}
+ 'woot.com': {'type': 'link', 'target': 'user_builds/kong'},
},
'public_cname_root': {
'woot.com': {'type': 'link', 'target': 'public_web_root/kong'},
@@ -159,8 +168,8 @@ def test_symlink_cname(self):
'kong': {'en': {'latest': {
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
- }}}
- }
+ }}},
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -170,8 +179,10 @@ def test_symlink_cname(self):
self.assertFilesystem(filesystem)
def test_symlink_remove_orphan_symlinks(self):
- self.domain = get(Domain, project=self.project, domain='woot.com',
- url='http://woot.com', cname=True)
+ self.domain = get(
+ Domain, project=self.project, domain='woot.com',
+ url='http://woot.com', cname=True,
+ )
self.symlink.symlink_cnames()
# Editing the Domain and calling save will symlink the new domain and
@@ -201,8 +212,8 @@ def test_symlink_remove_orphan_symlinks(self):
'kong': {'en': {'latest': {
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
- }}}
- }
+ }}},
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -253,20 +264,22 @@ def test_broadcast_remove_orphan_symlinks(self):
)
def test_symlink_cname_dont_link_missing_domains(self):
- """Domains should be relinked after deletion"""
- self.domain = get(Domain, project=self.project, domain='woot.com',
- url='http://woot.com', cname=True)
+ """Domains should be relinked after deletion."""
+ self.domain = get(
+ Domain, project=self.project, domain='woot.com',
+ url='http://woot.com', cname=True,
+ )
self.symlink.symlink_cnames()
filesystem = {
'private_cname_project': {
- 'woot.com': {'type': 'link', 'target': 'user_builds/kong'}
+ 'woot.com': {'type': 'link', 'target': 'user_builds/kong'},
},
'private_cname_root': {
'woot.com': {'type': 'link', 'target': 'private_web_root/kong'},
},
'private_web_root': {'kong': {'en': {}}},
'public_cname_project': {
- 'woot.com': {'type': 'link', 'target': 'user_builds/kong'}
+ 'woot.com': {'type': 'link', 'target': 'user_builds/kong'},
},
'public_cname_root': {
'woot.com': {'type': 'link', 'target': 'public_web_root/kong'},
@@ -275,8 +288,8 @@ def test_symlink_cname_dont_link_missing_domains(self):
'kong': {'en': {'latest': {
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
- }}}
- }
+ }}},
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -303,22 +316,26 @@ class TestPrivateSymlinkCnames(BaseSymlinkCnames, TempSiteRootTestCase):
symlink_class = PrivateSymlink
-class BaseSubprojects(object):
+class BaseSubprojects:
def setUp(self):
- super(BaseSubprojects, self).setUp()
- self.project = get(Project, slug='kong', privacy_level=self.privacy,
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.project.save()
- self.subproject = get(Project, slug='sub', privacy_level=self.privacy,
- main_language_project=None)
+ self.subproject = get(
+ Project, slug='sub', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.subproject.versions.update(privacy_level=self.privacy)
self.subproject.save()
self.symlink = self.symlink_class(self.project)
def test_subproject_normal(self):
- """Symlink pass adds symlink for subproject"""
+ """Symlink pass adds symlink for subproject."""
self.project.add_subproject(self.subproject)
self.symlink.symlink_subprojects()
filesystem = {
@@ -340,16 +357,16 @@ def test_subproject_normal(self):
'sub': {
'type': 'link',
'target': 'public_web_root/sub',
- }
- }
+ },
+ },
},
'sub': {
'en': {'latest': {
'type': 'link',
'target': 'user_builds/sub/rtd-builds/latest',
- }}
- }
- }
+ }},
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -360,7 +377,7 @@ def test_subproject_normal(self):
self.assertFilesystem(filesystem)
def test_subproject_alias(self):
- """Symlink pass adds symlink for subproject alias"""
+ """Symlink pass adds symlink for subproject alias."""
self.project.add_subproject(self.subproject, alias='sweet-alias')
self.symlink.symlink_subprojects()
filesystem = {
@@ -387,15 +404,15 @@ def test_subproject_alias(self):
'type': 'link',
'target': 'public_web_root/sub',
},
- }
+ },
},
'sub': {
'en': {'latest': {
'type': 'link',
'target': 'user_builds/sub/rtd-builds/latest',
- }}
- }
- }
+ }},
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -407,7 +424,7 @@ def test_subproject_alias(self):
self.assertFilesystem(filesystem)
def test_subproject_alias_with_spaces(self):
- """Symlink pass adds symlink for subproject alias"""
+ """Symlink pass adds symlink for subproject alias."""
self.project.add_subproject(self.subproject, alias='Sweet Alias')
self.symlink.symlink_subprojects()
filesystem = {
@@ -434,15 +451,15 @@ def test_subproject_alias_with_spaces(self):
'type': 'link',
'target': 'public_web_root/sub',
},
- }
+ },
},
'sub': {
'en': {'latest': {
'type': 'link',
'target': 'user_builds/sub/rtd-builds/latest',
- }}
- }
- }
+ }},
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -454,7 +471,7 @@ def test_subproject_alias_with_spaces(self):
self.assertFilesystem(filesystem)
def test_remove_subprojects(self):
- """Nonexistent subprojects are unlinked"""
+ """Nonexistent subprojects are unlinked."""
self.project.add_subproject(self.subproject)
self.symlink.symlink_subprojects()
filesystem = {
@@ -476,16 +493,16 @@ def test_remove_subprojects(self):
'sub': {
'type': 'link',
'target': 'public_web_root/sub',
- }
- }
+ },
+ },
},
'sub': {
'en': {'latest': {
'type': 'link',
'target': 'user_builds/sub/rtd-builds/latest',
- }}
- }
- }
+ }},
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -514,29 +531,37 @@ class TestPrivateSubprojects(BaseSubprojects, TempSiteRootTestCase):
symlink_class = PrivateSymlink
-class BaseSymlinkTranslations(object):
+class BaseSymlinkTranslations:
def setUp(self):
- super(BaseSymlinkTranslations, self).setUp()
- self.project = get(Project, slug='kong', privacy_level=self.privacy,
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.project.save()
- self.translation = get(Project, slug='pip', language='de',
- privacy_level=self.privacy,
- main_language_project=None)
+ self.translation = get(
+ Project, slug='pip', language='de',
+ privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.translation.versions.update(privacy_level=self.privacy)
self.translation.save()
self.project.translations.add(self.translation)
self.symlink = self.symlink_class(self.project)
- get(Version, slug='master', verbose_name='master', active=True,
- project=self.project, privacy_level=self.privacy)
- get(Version, slug='master', verbose_name='master', active=True,
- project=self.translation, privacy_level=self.privacy)
+ get(
+ Version, slug='master', verbose_name='master', active=True,
+ project=self.project, privacy_level=self.privacy,
+ )
+ get(
+ Version, slug='master', verbose_name='master', active=True,
+ project=self.translation, privacy_level=self.privacy,
+ )
self.assertIn(self.translation, self.project.translations.all())
def test_symlink_basic(self):
- """Test basic scenario, language english, translation german"""
+ """Test basic scenario, language english, translation german."""
self.symlink.symlink_translations()
filesystem = {
'private_cname_project': {},
@@ -574,9 +599,9 @@ def test_symlink_basic(self):
'type': 'link',
'target': 'user_builds/pip/rtd-builds/master',
},
- }
- }
- }
+ },
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -587,7 +612,7 @@ def test_symlink_basic(self):
self.assertFilesystem(filesystem)
def test_symlink_non_english(self):
- """Test language german, translation english"""
+ """Test language german, translation english."""
self.project.language = 'de'
self.translation.language = 'en'
self.project.save()
@@ -629,9 +654,9 @@ def test_symlink_non_english(self):
'type': 'link',
'target': 'user_builds/pip/rtd-builds/master',
},
- }
- }
- }
+ },
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -642,7 +667,8 @@ def test_symlink_non_english(self):
self.assertFilesystem(filesystem)
def test_symlink_no_english(self):
- """Test language german, no english
+ """
+ Test language german, no english.
This should symlink the translation to 'en' even though there is no 'en'
language in translations or project language
@@ -685,9 +711,9 @@ def test_symlink_no_english(self):
'type': 'link',
'target': 'user_builds/pip/rtd-builds/master',
},
- }
- }
- }
+ },
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -734,9 +760,9 @@ def test_remove_language(self):
'type': 'link',
'target': 'user_builds/pip/rtd-builds/master',
},
- }
- }
- }
+ },
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -766,12 +792,14 @@ class TestPrivateSymlinkTranslations(BaseSymlinkTranslations, TempSiteRootTestCa
symlink_class = PrivateSymlink
-class BaseSymlinkSingleVersion(object):
+class BaseSymlinkSingleVersion:
def setUp(self):
- super(BaseSymlinkSingleVersion, self).setUp()
- self.project = get(Project, slug='kong', privacy_level=self.privacy,
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.project.save()
self.version = self.project.versions.get(slug='latest')
@@ -794,7 +822,7 @@ def test_symlink_single_version(self):
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
},
- }
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -820,8 +848,8 @@ def test_symlink_single_version_missing(self):
'kong': {
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
- }
- }
+ },
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -841,17 +869,21 @@ class TestPublicSymlinkSingleVersion(BaseSymlinkSingleVersion, TempSiteRootTestC
symlink_class = PrivateSymlink
-class BaseSymlinkVersions(object):
+class BaseSymlinkVersions:
def setUp(self):
- super(BaseSymlinkVersions, self).setUp()
- self.project = get(Project, slug='kong', privacy_level=self.privacy,
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', privacy_level=self.privacy,
+ main_language_project=None,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.project.save()
- self.stable = get(Version, slug='stable', verbose_name='stable',
- active=True, project=self.project,
- privacy_level=self.privacy)
+ self.stable = get(
+ Version, slug='stable', verbose_name='stable',
+ active=True, project=self.project,
+ privacy_level=self.privacy,
+ )
self.project.versions.update(privacy_level=self.privacy)
self.symlink = self.symlink_class(self.project)
@@ -878,7 +910,7 @@ def test_symlink_versions(self):
},
},
},
- }
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -908,7 +940,7 @@ def test_removed_versions(self):
'target': 'user_builds/kong/rtd-builds/stable',
},
}},
- }
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -947,7 +979,7 @@ def test_symlink_other_versions(self):
'type': 'link',
'target': 'user_builds/kong/rtd-builds/latest',
}}},
- }
+ },
}
if self.privacy == 'private':
public_root = filesystem['public_web_root'].copy()
@@ -970,12 +1002,16 @@ class TestPrivateSymlinkVersions(BaseSymlinkVersions, TempSiteRootTestCase):
class TestPublicSymlinkUnicode(TempSiteRootTestCase):
def setUp(self):
- super(TestPublicSymlinkUnicode, self).setUp()
- self.project = get(Project, slug='kong', name=u'foo-∫',
- main_language_project=None)
+ super().setUp()
+ self.project = get(
+ Project, slug='kong', name='foo-∫',
+ main_language_project=None,
+ )
self.project.save()
- self.stable = get(Version, slug='foo-a', verbose_name=u'foo-∂',
- active=True, project=self.project)
+ self.stable = get(
+ Version, slug='foo-a', verbose_name='foo-∂',
+ active=True, project=self.project,
+ )
self.symlink = PublicSymlink(self.project)
def test_symlink_no_error(self):
@@ -1035,19 +1071,21 @@ def test_symlink_broadcast_calls_on_project_save(self):
class TestPublicPrivateSymlink(TempSiteRootTestCase):
def setUp(self):
- super(TestPublicPrivateSymlink, self).setUp()
+ super().setUp()
from django.contrib.auth.models import User
self.user = get(User)
self.project = get(
Project, name='project', slug='project', privacy_level='public',
- users=[self.user], main_language_project=None)
+ users=[self.user], main_language_project=None,
+ )
self.project.versions.update(privacy_level='public')
self.project.save()
self.subproject = get(
Project, name='subproject', slug='subproject', privacy_level='public',
- users=[self.user], main_language_project=None)
+ users=[self.user], main_language_project=None,
+ )
self.subproject.versions.update(privacy_level='public')
self.subproject.save()
@@ -1055,8 +1093,8 @@ def test_change_subproject_privacy(self):
"""
Changing the subproject's ``privacy_level`` creates proper symlinks.
- When the ``privacy_level`` changes in the subprojects, we need to
- re-symlink the superproject also to keep in sync its symlink under the
+ When the ``privacy_level`` changes in a subproject, we also need to
+ re-symlink the superproject to keep its symlink in sync under the
private/public roots.
"""
filesystem_before = {
@@ -1152,11 +1190,13 @@ def test_change_subproject_privacy(self):
self.client.force_login(self.user)
self.client.post(
- reverse('project_version_detail',
- kwargs={
- 'project_slug': self.subproject.slug,
- 'version_slug': self.subproject.versions.first().slug,
- }),
+ reverse(
+ 'project_version_detail',
+ kwargs={
+ 'project_slug': self.subproject.slug,
+ 'version_slug': self.subproject.versions.first().slug,
+ },
+ ),
data={'privacy_level': 'private', 'active': True},
)
@@ -1164,10 +1204,12 @@ def test_change_subproject_privacy(self):
self.assertTrue(self.subproject.versions.first().active)
self.client.post(
- reverse('projects_advanced',
- kwargs={
- 'project_slug': self.subproject.slug,
- }),
+ reverse(
+ 'projects_advanced',
+ kwargs={
+ 'project_slug': self.subproject.slug,
+ },
+ ),
data={
# Required defaults
'python_interpreter': 'python',
diff --git a/readthedocs/rtd_tests/tests/test_project_views.py b/readthedocs/rtd_tests/tests/test_project_views.py
index cf437f29d62..0e7225ac65e 100644
--- a/readthedocs/rtd_tests/tests/test_project_views.py
+++ b/readthedocs/rtd_tests/tests/test_project_views.py
@@ -1,28 +1,28 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
from datetime import timedelta
-
-from mock import patch
-from django.test import TestCase
from django.contrib.auth.models import User
from django.contrib.messages import constants as message_const
-from django.urls import reverse
from django.http.response import HttpResponseRedirect
-from django.views.generic.base import ContextMixin
+from django.test import TestCase
+from django.urls import reverse
from django.utils import timezone
+from django.views.generic.base import ContextMixin
from django_dynamic_fixture import get, new
-
-import six
+from mock import patch
from readthedocs.builds.models import Build, Version
-from readthedocs.rtd_tests.base import (WizardTestCase, MockBuildTestCase,
- RequestFactoryTestMixin)
from readthedocs.oauth.models import RemoteRepository
+from readthedocs.projects import tasks
from readthedocs.projects.exceptions import ProjectSpamError
-from readthedocs.projects.models import Project, Domain
-from readthedocs.projects.views.private import ImportWizardView
+from readthedocs.projects.models import Domain, Project
from readthedocs.projects.views.mixins import ProjectRelationMixin
-from readthedocs.projects import tasks
+from readthedocs.projects.views.private import ImportWizardView
+from readthedocs.rtd_tests.base import (
+ MockBuildTestCase,
+ RequestFactoryTestMixin,
+ WizardTestCase,
+)
@patch('readthedocs.projects.views.private.trigger_build', lambda x: None)
@@ -32,7 +32,7 @@ class TestProfileMiddleware(RequestFactoryTestMixin, TestCase):
url = '/dashboard/import/manual/'
def setUp(self):
- super(TestProfileMiddleware, self).setUp()
+ super().setUp()
data = {
'basics': {
'name': 'foobar',
@@ -47,12 +47,12 @@ def setUp(self):
}
self.data = {}
for key in data:
- self.data.update({('{0}-{1}'.format(key, k), v)
+ self.data.update({('{}-{}'.format(key, k), v)
for (k, v) in list(data[key].items())})
- self.data['{0}-current_step'.format(self.wizard_class_slug)] = 'extra'
+ self.data['{}-current_step'.format(self.wizard_class_slug)] = 'extra'
def test_profile_middleware_no_profile(self):
- """User without profile and isn't banned"""
+ """User without profile and isn't banned."""
req = self.request('/projects/import', method='post', data=self.data)
req.user = get(User, profile=None)
resp = ImportWizardView.as_view()(req)
@@ -61,7 +61,7 @@ def test_profile_middleware_no_profile(self):
@patch('readthedocs.projects.views.private.ProjectBasicsForm.clean')
def test_profile_middleware_spam(self, form):
- """User will be banned"""
+ """User will be banned."""
form.side_effect = ProjectSpamError
req = self.request('/projects/import', method='post', data=self.data)
req.user = get(User)
@@ -71,7 +71,7 @@ def test_profile_middleware_spam(self, form):
self.assertTrue(req.user.profile.banned)
def test_profile_middleware_banned(self):
- """User is banned"""
+ """User is banned."""
req = self.request('/projects/import', method='post', data=self.data)
req.user = get(User)
req.user.profile.banned = True
@@ -101,10 +101,10 @@ def tearDown(self):
def request(self, *args, **kwargs):
kwargs['user'] = self.user
- return super(TestBasicsForm, self).request(*args, **kwargs)
+ return super().request(*args, **kwargs)
def test_form_pass(self):
- """Only submit the basics"""
+ """Only submit the basics."""
resp = self.post_step('basics')
self.assertIsInstance(resp, HttpResponseRedirect)
self.assertEqual(resp.status_code, 302)
@@ -136,7 +136,7 @@ def test_remote_repository_is_not_added_for_wrong_user(self):
self.assertWizardFailure(resp, 'remote_repository')
def test_form_missing(self):
- """Submit form with missing data, expect to get failures"""
+ """Submit form with missing data, expect to get failures."""
self.step_data['basics'] = {'advanced': True}
resp = self.post_step('basics')
self.assertWizardFailure(resp, 'name')
@@ -146,7 +146,7 @@ def test_form_missing(self):
class TestAdvancedForm(TestBasicsForm):
def setUp(self):
- super(TestAdvancedForm, self).setUp()
+ super().setUp()
self.step_data['basics']['advanced'] = True
self.step_data['extra'] = {
'description': 'Describe foobar',
@@ -156,7 +156,7 @@ def setUp(self):
}
def test_form_pass(self):
- """Test all forms pass validation"""
+ """Test all forms pass validation."""
resp = self.post_step('basics')
self.assertWizardResponse(resp, 'extra')
resp = self.post_step('extra', session=list(resp._request.session.items()))
@@ -169,16 +169,16 @@ def test_form_pass(self):
data = self.step_data['basics']
del data['advanced']
del self.step_data['extra']['tags']
- six.assertCountEqual(
- self,
+ self.assertCountEqual(
[tag.name for tag in proj.tags.all()],
- [u'bar', u'baz', u'foo'])
+ ['bar', 'baz', 'foo'],
+ )
data.update(self.step_data['extra'])
for (key, val) in list(data.items()):
self.assertEqual(getattr(proj, key), val)
def test_form_missing_extra(self):
- """Submit extra form with missing data, expect to get failures"""
+ """Submit extra form with missing data, expect to get failures."""
# Remove extra data to trigger validation errors
self.step_data['extra'] = {}
@@ -203,10 +203,12 @@ def test_remote_repository_is_added(self):
self.assertIsNotNone(proj)
self.assertEqual(proj.remote_repository, remote_repo)
- @patch('readthedocs.projects.views.private.ProjectExtraForm.clean_description',
- create=True)
+ @patch(
+ 'readthedocs.projects.views.private.ProjectExtraForm.clean_description',
+ create=True,
+ )
def test_form_spam(self, mocked_validator):
- """Don't add project on a spammy description"""
+ """Don't add project on a spammy description."""
self.user.date_joined = timezone.now() - timedelta(days=365)
self.user.save()
mocked_validator.side_effect = ProjectSpamError
@@ -225,10 +227,12 @@ def test_form_spam(self, mocked_validator):
proj = Project.objects.get(name='foobar')
self.assertFalse(self.user.profile.banned)
- @patch('readthedocs.projects.views.private.ProjectExtraForm.clean_description',
- create=True)
+ @patch(
+ 'readthedocs.projects.views.private.ProjectExtraForm.clean_description',
+ create=True,
+ )
def test_form_spam_ban_user(self, mocked_validator):
- """Don't add spam and ban new user"""
+ """Don't add spam and ban new user."""
self.user.date_joined = timezone.now()
self.user.save()
mocked_validator.side_effect = ProjectSpamError
@@ -249,7 +253,7 @@ def test_form_spam_ban_user(self, mocked_validator):
class TestImportDemoView(MockBuildTestCase):
- """Test project import demo view"""
+ """Test project import demo view."""
fixtures = ['test_data', 'eric']
@@ -266,7 +270,7 @@ def test_import_demo_pass(self):
self.assertEqual(messages[0].level, message_const.SUCCESS)
def test_import_demo_already_imported(self):
- """Import demo project multiple times, expect failure 2nd post"""
+ """Import demo project multiple times, expect failure 2nd post."""
self.test_import_demo_pass()
project = Project.objects.get(slug='eric-demo')
@@ -279,11 +283,13 @@ def test_import_demo_already_imported(self):
messages = list(resp_redir.context['messages'])
self.assertEqual(messages[0].level, message_const.SUCCESS)
- self.assertEqual(project,
- Project.objects.get(slug='eric-demo'))
+ self.assertEqual(
+ project,
+ Project.objects.get(slug='eric-demo'),
+ )
def test_import_demo_another_user_imported(self):
- """Import demo project after another user, expect success"""
+ """Import demo project after another user, expect success."""
self.test_import_demo_pass()
project = Project.objects.get(slug='eric-demo')
@@ -299,7 +305,7 @@ def test_import_demo_another_user_imported(self):
self.assertEqual(messages[0].level, message_const.SUCCESS)
def test_import_demo_imported_renamed(self):
- """If the demo project is renamed, don't import another"""
+ """If the demo project is renamed, don't import another."""
self.test_import_demo_pass()
project = Project.objects.get(slug='eric-demo')
project.name = 'eric-demo-foobar'
@@ -313,14 +319,19 @@ def test_import_demo_imported_renamed(self):
self.assertEqual(resp_redir.status_code, 200)
messages = list(resp_redir.context['messages'])
self.assertEqual(messages[0].level, message_const.SUCCESS)
- self.assertRegex(messages[0].message,
- r'already imported')
+ self.assertRegex(
+ messages[0].message,
+ r'already imported',
+ )
- self.assertEqual(project,
- Project.objects.get(slug='eric-demo'))
+ self.assertEqual(
+ project,
+ Project.objects.get(slug='eric-demo'),
+ )
def test_import_demo_imported_duplicate(self):
- """If a project exists with same name, expect a failure importing demo
+ """
+ If a project exists with the same name, expect a failure importing the demo.
This should be edge case, user would have to import a project (not the
demo project), named user-demo, and then manually enter the demo import
@@ -339,11 +350,15 @@ def test_import_demo_imported_duplicate(self):
self.assertEqual(resp_redir.status_code, 200)
messages = list(resp_redir.context['messages'])
self.assertEqual(messages[0].level, message_const.ERROR)
- self.assertRegex(messages[0].message,
- r'There was a problem')
+ self.assertRegex(
+ messages[0].message,
+ r'There was a problem',
+ )
- self.assertEqual(project,
- Project.objects.get(slug='eric-demo'))
+ self.assertEqual(
+ project,
+ Project.objects.get(slug='eric-demo'),
+ )
class TestPrivateViews(MockBuildTestCase):
@@ -380,7 +395,8 @@ def test_delete_project(self):
broadcast.assert_called_with(
type='app',
task=tasks.remove_dirs,
- args=[(project.doc_path,)])
+ args=[(project.doc_path,)],
+ )
def test_subproject_create(self):
project = get(Project, slug='pip', users=[self.user])
@@ -395,7 +411,8 @@ def test_subproject_create(self):
broadcast.assert_called_with(
type='app',
task=tasks.symlink_subproject,
- args=[project.pk])
+ args=[project.pk],
+ )
class TestPrivateMixins(MockBuildTestCase):
@@ -405,7 +422,7 @@ def setUp(self):
self.domain = get(Domain, project=self.project)
def test_project_relation(self):
- """Class using project relation mixin class"""
+ """Class using project relation mixin class."""
class FoobarView(ProjectRelationMixin, ContextMixin):
model = Domain
diff --git a/readthedocs/rtd_tests/tests/test_redirects.py b/readthedocs/rtd_tests/tests/test_redirects.py
index 335c8feba86..6fbafebf3bc 100644
--- a/readthedocs/rtd_tests/tests/test_redirects.py
+++ b/readthedocs/rtd_tests/tests/test_redirects.py
@@ -1,10 +1,10 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+import logging
+
from django.http import Http404
from django.test import TestCase
from django.test.utils import override_settings
-
-from django_dynamic_fixture import get
-from django_dynamic_fixture import fixture
+from django_dynamic_fixture import fixture, get
from mock import patch
from readthedocs.builds.constants import LATEST
@@ -12,28 +12,29 @@
from readthedocs.projects.models import Project
from readthedocs.redirects.models import Redirect
-import logging
-
@override_settings(PUBLIC_DOMAIN='readthedocs.org', USE_SUBDOMAIN=False, APPEND_SLASH=False)
class RedirectTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
logging.disable(logging.DEBUG)
self.client.login(username='eric', password='test')
self.client.post(
'/dashboard/import/',
- {'repo_type': 'git', 'name': 'Pip',
- 'tags': 'big, fucking, monkey', 'default_branch': '',
- 'project_url': 'http://pip.rtfd.org',
- 'repo': 'https://github.com/fail/sauce',
- 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
- 'default_version': LATEST,
- 'privacy_level': 'public',
- 'version_privacy_level': 'public',
- 'description': 'wat',
- 'documentation_type': 'sphinx'})
+ {
+ 'repo_type': 'git', 'name': 'Pip',
+ 'tags': 'big, fucking, monkey', 'default_branch': '',
+ 'project_url': 'http://pip.rtfd.org',
+ 'repo': 'https://github.com/fail/sauce',
+ 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
+ 'default_version': LATEST,
+ 'privacy_level': 'public',
+ 'version_privacy_level': 'public',
+ 'description': 'wat',
+ 'documentation_type': 'sphinx',
+ },
+ )
pip = Project.objects.get(slug='pip')
pip.versions.create_latest()
@@ -45,14 +46,17 @@ def test_proper_url(self):
r = self.client.get('/docs/pip/')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://readthedocs.org/docs/pip/en/latest/')
+ r['Location'], 'http://readthedocs.org/docs/pip/en/latest/',
+ )
# Specific Page Redirects
def test_proper_page_on_main_site(self):
r = self.client.get('/docs/pip/page/test.html')
self.assertEqual(r.status_code, 302)
- self.assertEqual(r['Location'],
- 'http://readthedocs.org/docs/pip/en/latest/test.html')
+ self.assertEqual(
+ r['Location'],
+ 'http://readthedocs.org/docs/pip/en/latest/test.html',
+ )
# If slug is neither valid lang nor valid version, it should 404.
# TODO: This should 404 directly, not redirect first
@@ -90,15 +94,18 @@ def test_proper_subdomain(self):
r = self.client.get('/', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/',
+ )
# Specific Page Redirects
@override_settings(USE_SUBDOMAIN=True)
def test_proper_page_on_subdomain(self):
r = self.client.get('/page/test.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
- self.assertEqual(r['Location'],
- 'http://pip.readthedocs.org/en/latest/test.html')
+ self.assertEqual(
+ r['Location'],
+ 'http://pip.readthedocs.org/en/latest/test.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_improper_subdomain_filename_only(self):
@@ -108,22 +115,25 @@ def test_improper_subdomain_filename_only(self):
@override_settings(PUBLIC_DOMAIN='readthedocs.org', USE_SUBDOMAIN=False)
class RedirectAppTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
self.client.login(username='eric', password='test')
self.client.post(
'/dashboard/import/',
- {'repo_type': 'git', 'name': 'Pip',
- 'tags': 'big, fucking, monkey', 'default_branch': '',
- 'project_url': 'http://pip.rtfd.org',
- 'repo': 'https://github.com/fail/sauce',
- 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
- 'default_version': LATEST,
- 'privacy_level': 'public',
- 'version_privacy_level': 'public',
- 'description': 'wat',
- 'documentation_type': 'sphinx'})
+ {
+ 'repo_type': 'git', 'name': 'Pip',
+ 'tags': 'big, fucking, monkey', 'default_branch': '',
+ 'project_url': 'http://pip.rtfd.org',
+ 'repo': 'https://github.com/fail/sauce',
+ 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
+ 'default_version': LATEST,
+ 'privacy_level': 'public',
+ 'version_privacy_level': 'public',
+ 'description': 'wat',
+ 'documentation_type': 'sphinx',
+ },
+ )
self.pip = Project.objects.get(slug='pip')
self.pip.versions.create_latest()
@@ -145,12 +155,14 @@ def test_redirect_prefix_infinite(self):
r = self.client.get('/redirect', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/redirect.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/redirect.html',
+ )
r = self.client.get('/redirect/', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/redirect/')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/redirect/',
+ )
r = self.client.get('/en/latest/redirect/', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 404)
@@ -158,33 +170,37 @@ def test_redirect_prefix_infinite(self):
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_root(self):
Redirect.objects.create(
- project=self.pip, redirect_type='prefix', from_url='/woot/')
+ project=self.pip, redirect_type='prefix', from_url='/woot/',
+ )
r = self.client.get('/woot/faq.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_page(self):
Redirect.objects.create(
project=self.pip, redirect_type='page',
- from_url='/install.html', to_url='/tutorial/install.html'
+ from_url='/install.html', to_url='/tutorial/install.html',
)
r = self.client.get('/install.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/tutorial/install.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/tutorial/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_exact(self):
Redirect.objects.create(
project=self.pip, redirect_type='exact',
- from_url='/en/latest/install.html', to_url='/en/latest/tutorial/install.html'
+ from_url='/en/latest/install.html', to_url='/en/latest/tutorial/install.html',
)
r = self.client.get('/en/latest/install.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/tutorial/install.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/tutorial/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_exact_with_rest(self):
@@ -202,7 +218,8 @@ def test_redirect_exact_with_rest(self):
r = self.client.get('/en/latest/guides/install.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/version/guides/install.html')
+ r['Location'], 'http://pip.readthedocs.org/en/version/guides/install.html',
+ )
Redirect.objects.create(
project=self.pip, redirect_type='exact',
@@ -211,7 +228,8 @@ def test_redirect_exact_with_rest(self):
r = self.client.get('/es/version/guides/install.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/master/guides/install.html')
+ r['Location'], 'http://pip.readthedocs.org/en/master/guides/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_inactive_version(self):
@@ -236,75 +254,94 @@ def test_redirect_inactive_version(self):
r = self.client.get('/en/oldversion/', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/newversion/')
+ r['Location'], 'http://pip.readthedocs.org/en/newversion/',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_keeps_version_number(self):
Redirect.objects.create(
project=self.pip, redirect_type='page',
- from_url='/how_to_install.html', to_url='/install.html')
+ from_url='/how_to_install.html', to_url='/install.html',
+ )
with patch('readthedocs.core.views.serve._serve_symlink_docs') as _serve_docs:
_serve_docs.side_effect = Http404()
- r = self.client.get('/en/0.8.1/how_to_install.html',
- HTTP_HOST='pip.readthedocs.org')
+ r = self.client.get(
+ '/en/0.8.1/how_to_install.html',
+ HTTP_HOST='pip.readthedocs.org',
+ )
self.assertEqual(r.status_code, 302)
self.assertEqual(
r['Location'],
- 'http://pip.readthedocs.org/en/0.8.1/install.html')
+ 'http://pip.readthedocs.org/en/0.8.1/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_keeps_language(self):
Redirect.objects.create(
project=self.pip, redirect_type='page',
- from_url='/how_to_install.html', to_url='/install.html')
+ from_url='/how_to_install.html', to_url='/install.html',
+ )
with patch('readthedocs.core.views.serve._serve_symlink_docs') as _serve_docs:
_serve_docs.side_effect = Http404()
- r = self.client.get('/de/0.8.1/how_to_install.html',
- HTTP_HOST='pip.readthedocs.org')
+ r = self.client.get(
+ '/de/0.8.1/how_to_install.html',
+ HTTP_HOST='pip.readthedocs.org',
+ )
self.assertEqual(r.status_code, 302)
self.assertEqual(
r['Location'],
- 'http://pip.readthedocs.org/de/0.8.1/install.html')
+ 'http://pip.readthedocs.org/de/0.8.1/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True)
def test_redirect_recognizes_custom_cname(self):
Redirect.objects.create(
project=self.pip, redirect_type='page', from_url='/install.html',
- to_url='/tutorial/install.html')
- r = self.client.get('/install.html',
- HTTP_HOST='pip.pypa.io',
- HTTP_X_RTD_SLUG='pip')
+ to_url='/tutorial/install.html',
+ )
+ r = self.client.get(
+ '/install.html',
+ HTTP_HOST='pip.pypa.io',
+ HTTP_X_RTD_SLUG='pip',
+ )
self.assertEqual(r.status_code, 302)
self.assertEqual(
r['Location'],
- 'http://pip.pypa.io/en/latest/tutorial/install.html')
+ 'http://pip.pypa.io/en/latest/tutorial/install.html',
+ )
@override_settings(USE_SUBDOMAIN=True, PYTHON_MEDIA=True)
def test_redirect_html(self):
Redirect.objects.create(
- project=self.pip, redirect_type='sphinx_html')
+ project=self.pip, redirect_type='sphinx_html',
+ )
r = self.client.get('/en/latest/faq/', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html',
+ )
@override_settings(USE_SUBDOMAIN=True, PYTHON_MEDIA=True)
def test_redirect_html_index(self):
Redirect.objects.create(
- project=self.pip, redirect_type='sphinx_html')
+ project=self.pip, redirect_type='sphinx_html',
+ )
r = self.client.get('/en/latest/faq/index.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/faq.html',
+ )
@override_settings(USE_SUBDOMAIN=True, PYTHON_MEDIA=True)
def test_redirect_htmldir(self):
Redirect.objects.create(
- project=self.pip, redirect_type='sphinx_htmldir')
+ project=self.pip, redirect_type='sphinx_htmldir',
+ )
r = self.client.get('/en/latest/faq.html', HTTP_HOST='pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(
- r['Location'], 'http://pip.readthedocs.org/en/latest/faq/')
+ r['Location'], 'http://pip.readthedocs.org/en/latest/faq/',
+ )
class CustomRedirectTests(TestCase):
@@ -339,14 +376,16 @@ def test_redirect_fragment(self):
@override_settings(PUBLIC_DOMAIN='readthedocs.org', USE_SUBDOMAIN=False)
class RedirectBuildTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
- self.project = get(Project,
- slug='project-1',
- documentation_type='sphinx',
- conf_py_file='test_conf.py',
- versions=[fixture()])
+ self.project = get(
+ Project,
+ slug='project-1',
+ documentation_type='sphinx',
+ conf_py_file='test_conf.py',
+ versions=[fixture()],
+ )
self.version = self.project.versions.all()[0]
def test_redirect_list(self):
@@ -362,46 +401,46 @@ def test_redirect_detail(self):
@override_settings(PUBLIC_DOMAIN='readthedocs.org', USE_SUBDOMAIN=False)
class GetFullPathTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
- self.proj = Project.objects.get(slug="read-the-docs")
+ self.proj = Project.objects.get(slug='read-the-docs')
self.redirect = get(Redirect, project=self.proj)
def test_http_filenames_return_themselves(self):
self.assertEqual(
self.redirect.get_full_path('http://rtfd.org'),
- 'http://rtfd.org'
+ 'http://rtfd.org',
)
def test_redirects_no_subdomain(self):
self.assertEqual(
self.redirect.get_full_path('index.html'),
- '/docs/read-the-docs/en/latest/'
+ '/docs/read-the-docs/en/latest/',
)
@override_settings(
- USE_SUBDOMAIN=True, PRODUCTION_DOMAIN='rtfd.org'
+ USE_SUBDOMAIN=True, PRODUCTION_DOMAIN='rtfd.org',
)
def test_redirects_with_subdomain(self):
self.assertEqual(
self.redirect.get_full_path('faq.html'),
- '/en/latest/faq.html'
+ '/en/latest/faq.html',
)
@override_settings(
- USE_SUBDOMAIN=True, PRODUCTION_DOMAIN='rtfd.org'
+ USE_SUBDOMAIN=True, PRODUCTION_DOMAIN='rtfd.org',
)
def test_single_version_with_subdomain(self):
self.redirect.project.single_version = True
self.assertEqual(
self.redirect.get_full_path('faq.html'),
- '/faq.html'
+ '/faq.html',
)
def test_single_version_no_subdomain(self):
self.redirect.project.single_version = True
self.assertEqual(
self.redirect.get_full_path('faq.html'),
- '/docs/read-the-docs/faq.html'
+ '/docs/read-the-docs/faq.html',
)
diff --git a/readthedocs/rtd_tests/tests/test_repo_parsing.py b/readthedocs/rtd_tests/tests/test_repo_parsing.py
index f946db61e53..85ffbbf9997 100644
--- a/readthedocs/rtd_tests/tests/test_repo_parsing.py
+++ b/readthedocs/rtd_tests/tests/test_repo_parsing.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
from django.test import TestCase
from readthedocs.projects.models import Project
diff --git a/readthedocs/rtd_tests/tests/test_resolver.py b/readthedocs/rtd_tests/tests/test_resolver.py
index 1ef55d564ca..81f2f3ef5ab 100644
--- a/readthedocs/rtd_tests/tests/test_resolver.py
+++ b/readthedocs/rtd_tests/tests/test_resolver.py
@@ -1,13 +1,13 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
-
import django_dynamic_fixture as fixture
import mock
from django.test import TestCase, override_settings
from readthedocs.core.resolver import (
- Resolver, resolve, resolve_domain, resolve_path
+ Resolver,
+ resolve,
+ resolve_domain,
+ resolve_path,
)
from readthedocs.projects.constants import PRIVATE
from readthedocs.projects.models import Domain, Project, ProjectRelationship
@@ -67,7 +67,8 @@ def test_resolver_filename_index(self):
url = resolve_path(project=self.pip, filename='foo/bar/index.html')
self.assertEqual(url, '/docs/pip/en/latest/foo/bar/')
url = resolve_path(
- project=self.pip, filename='foo/index/index.html')
+ project=self.pip, filename='foo/index/index.html',
+ )
self.assertEqual(url, '/docs/pip/en/latest/foo/index/')
def test_resolver_filename_false_index(self):
@@ -75,9 +76,11 @@ def test_resolver_filename_false_index(self):
url = resolve_path(project=self.pip, filename='foo/foo_index.html')
self.assertEqual(url, '/docs/pip/en/latest/foo/foo_index.html')
url = resolve_path(
- project=self.pip, filename='foo_index/foo_index.html')
+ project=self.pip, filename='foo_index/foo_index.html',
+ )
self.assertEqual(
- url, '/docs/pip/en/latest/foo_index/foo_index.html')
+ url, '/docs/pip/en/latest/foo_index/foo_index.html',
+ )
def test_resolver_filename_sphinx(self):
self.pip.documentation_type = 'sphinx'
@@ -173,21 +176,25 @@ def test_resolver_force_single_version(self):
self.pip.single_version = False
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
- project=self.pip, filename='index.html', single_version=True)
+ project=self.pip, filename='index.html', single_version=True,
+ )
self.assertEqual(url, '/docs/pip/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
- project=self.pip, filename='index.html', single_version=True)
+ project=self.pip, filename='index.html', single_version=True,
+ )
self.assertEqual(url, '/')
def test_resolver_force_domain(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
- project=self.pip, filename='index.html', cname=True)
+ project=self.pip, filename='index.html', cname=True,
+ )
self.assertEqual(url, '/en/latest/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
- project=self.pip, filename='index.html', cname=True)
+ project=self.pip, filename='index.html', cname=True,
+ )
self.assertEqual(url, '/en/latest/')
def test_resolver_force_domain_single_version(self):
@@ -195,66 +202,78 @@ def test_resolver_force_domain_single_version(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
project=self.pip, filename='index.html', single_version=True,
- cname=True)
+ cname=True,
+ )
self.assertEqual(url, '/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
project=self.pip, filename='index.html', single_version=True,
- cname=True)
+ cname=True,
+ )
self.assertEqual(url, '/')
def test_resolver_force_language(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
- project=self.pip, filename='index.html', language='cz')
+ project=self.pip, filename='index.html', language='cz',
+ )
self.assertEqual(url, '/docs/pip/cz/latest/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
- project=self.pip, filename='index.html', language='cz')
+ project=self.pip, filename='index.html', language='cz',
+ )
self.assertEqual(url, '/cz/latest/')
def test_resolver_force_version(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
- project=self.pip, filename='index.html', version_slug='foo')
+ project=self.pip, filename='index.html', version_slug='foo',
+ )
self.assertEqual(url, '/docs/pip/en/foo/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
- project=self.pip, filename='index.html', version_slug='foo')
+ project=self.pip, filename='index.html', version_slug='foo',
+ )
self.assertEqual(url, '/en/foo/')
def test_resolver_force_language_version(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
project=self.pip, filename='index.html', language='cz',
- version_slug='foo')
+ version_slug='foo',
+ )
self.assertEqual(url, '/docs/pip/cz/foo/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
project=self.pip, filename='index.html', language='cz',
- version_slug='foo')
+ version_slug='foo',
+ )
self.assertEqual(url, '/cz/foo/')
def test_resolver_no_force_translation(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
- project=self.translation, filename='index.html', language='cz')
+ project=self.translation, filename='index.html', language='cz',
+ )
self.assertEqual(url, '/docs/pip/ja/latest/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
- project=self.translation, filename='index.html', language='cz')
+ project=self.translation, filename='index.html', language='cz',
+ )
self.assertEqual(url, '/ja/latest/')
def test_resolver_no_force_translation_with_version(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_path(
project=self.translation, filename='index.html', language='cz',
- version_slug='foo')
+ version_slug='foo',
+ )
self.assertEqual(url, '/docs/pip/ja/foo/')
with override_settings(USE_SUBDOMAIN=True):
url = resolve_path(
project=self.translation, filename='index.html', language='cz',
- version_slug='foo')
+ version_slug='foo',
+ )
self.assertEqual(url, '/ja/foo/')
@@ -274,7 +293,7 @@ def test_project_with_same_translation_and_main_language(self):
proj1.save()
self.assertEqual(
proj1.main_language_project.main_language_project,
- proj1
+ proj1,
)
# This tests that we aren't going to re-recurse back to resolving proj1
@@ -428,7 +447,8 @@ def test_domain_resolver_translation_itself(self):
@override_settings(
PRODUCTION_DOMAIN='readthedocs.org',
- PUBLIC_DOMAIN='public.readthedocs.org')
+ PUBLIC_DOMAIN='public.readthedocs.org',
+ )
def test_domain_public(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve_domain(project=self.translation)
@@ -492,11 +512,13 @@ def test_resolver_subproject(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve(project=self.subproject)
self.assertEqual(
- url, 'http://readthedocs.org/docs/pip/projects/sub/ja/latest/')
+ url, 'http://readthedocs.org/docs/pip/projects/sub/ja/latest/',
+ )
with override_settings(USE_SUBDOMAIN=True):
url = resolve(project=self.subproject)
self.assertEqual(
- url, 'http://pip.readthedocs.org/projects/sub/ja/latest/')
+ url, 'http://pip.readthedocs.org/projects/sub/ja/latest/',
+ )
@override_settings(PRODUCTION_DOMAIN='readthedocs.org')
def test_resolver_translation(self):
@@ -576,7 +598,8 @@ def test_resolver_private_version_override(self):
@override_settings(
PRODUCTION_DOMAIN='readthedocs.org',
- PUBLIC_DOMAIN='public.readthedocs.org')
+ PUBLIC_DOMAIN='public.readthedocs.org',
+ )
def test_resolver_public_domain_overrides(self):
with override_settings(USE_SUBDOMAIN=False):
url = resolve(project=self.pip, private=True)
@@ -586,10 +609,12 @@ def test_resolver_public_domain_overrides(self):
with override_settings(USE_SUBDOMAIN=True):
url = resolve(project=self.pip, private=True)
self.assertEqual(
- url, 'http://pip.public.readthedocs.org/en/latest/')
+ url, 'http://pip.public.readthedocs.org/en/latest/',
+ )
url = resolve(project=self.pip, private=False)
self.assertEqual(
- url, 'http://pip.public.readthedocs.org/en/latest/')
+ url, 'http://pip.public.readthedocs.org/en/latest/',
+ )
# Domain overrides PUBLIC_DOMAIN
self.domain = fixture.get(
@@ -627,7 +652,7 @@ def test_resolver_domain_https(self):
self.assertEqual(url, 'http://pip.readthedocs.io/en/latest/')
-class ResolverAltSetUp(object):
+class ResolverAltSetUp:
def setUp(self):
with mock.patch('readthedocs.projects.models.broadcast'):
@@ -720,20 +745,24 @@ def test_subproject_with_translation_without_custom_domain(self):
self.assertEqual(
url, 'http://{project.slug}.readthedocs.io/en/latest/'.format(
project=self.superproject_en,
- ))
+ ),
+ )
url = resolve(self.superproject_es, filename='')
self.assertEqual(
url, 'http://{project.slug}.readthedocs.io/es/latest/'.format(
project=self.superproject_en,
- ))
+ ),
+ )
url = resolve(self.subproject_en, filename='')
# yapf: disable
self.assertEqual(
url,
- ('http://{project.slug}.readthedocs.io/projects/'
- '{subproject.slug}/en/latest/').format(
+ (
+ 'http://{project.slug}.readthedocs.io/projects/'
+ '{subproject.slug}/en/latest/'
+ ).format(
project=self.superproject_en,
subproject=self.subproject_en,
),
@@ -742,8 +771,10 @@ def test_subproject_with_translation_without_custom_domain(self):
url = resolve(self.subproject_es, filename='')
self.assertEqual(
url,
- ('http://{project.slug}.readthedocs.io/projects/'
- '{subproject.slug}/es/latest/').format(
+ (
+ 'http://{project.slug}.readthedocs.io/projects/'
+ '{subproject.slug}/es/latest/'
+ ).format(
project=self.superproject_en,
subproject=self.subproject_en,
),
@@ -770,8 +801,10 @@ def test_subproject_with_translation_with_custom_domain(self):
url = resolve(self.subproject_en, filename='')
self.assertEqual(
url,
- ('http://docs.example.com/projects/'
- '{subproject.slug}/en/latest/').format(
+ (
+ 'http://docs.example.com/projects/'
+ '{subproject.slug}/en/latest/'
+ ).format(
subproject=self.subproject_en,
),
)
@@ -779,8 +812,10 @@ def test_subproject_with_translation_with_custom_domain(self):
url = resolve(self.subproject_es, filename='')
self.assertEqual(
url,
- ('http://docs.example.com/projects/'
- '{subproject.slug}/es/latest/').format(
+ (
+ 'http://docs.example.com/projects/'
+ '{subproject.slug}/es/latest/'
+ ).format(
subproject=self.subproject_en,
),
)
diff --git a/readthedocs/rtd_tests/tests/test_restapi_client.py b/readthedocs/rtd_tests/tests/test_restapi_client.py
index 88906fdc10f..cf555b74883 100644
--- a/readthedocs/rtd_tests/tests/test_restapi_client.py
+++ b/readthedocs/rtd_tests/tests/test_restapi_client.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import, unicode_literals)
-
from django.test import TestCase
from readthedocs.restapi.client import DrfJsonSerializer
@@ -9,7 +6,7 @@
class TestDrfJsonSerializer(TestCase):
data = {
- 'proper': 'json'
+ 'proper': 'json',
}
serialized_data = '{"proper":"json"}'
diff --git a/readthedocs/rtd_tests/tests/test_search_json_parsing.py b/readthedocs/rtd_tests/tests/test_search_json_parsing.py
index fb91d31b276..42b0839c4e9 100644
--- a/readthedocs/rtd_tests/tests/test_search_json_parsing.py
+++ b/readthedocs/rtd_tests/tests/test_search_json_parsing.py
@@ -1,10 +1,11 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
import os
from django.test import TestCase
from readthedocs.search.parse_json import process_file
+
base_dir = os.path.dirname(os.path.dirname(__file__))
class TestHacks(TestCase):
@@ -14,7 +15,7 @@ def test_h2_parsing(self):
os.path.join(
base_dir,
'files/api.fjson',
- )
+ ),
)
self.assertEqual(data['sections'][1]['id'], 'a-basic-api-client-using-slumber')
# Only capture h2's after the first section
diff --git a/readthedocs/rtd_tests/tests/test_single_version.py b/readthedocs/rtd_tests/tests/test_single_version.py
index faee5f585f7..e69b80375af 100644
--- a/readthedocs/rtd_tests/tests/test_single_version.py
+++ b/readthedocs/rtd_tests/tests/test_single_version.py
@@ -1,14 +1,13 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+import django_dynamic_fixture as fixture
from django.test import TestCase
from django.test.utils import override_settings
-import django_dynamic_fixture as fixture
-
from readthedocs.projects.models import Project
@override_settings(
- USE_SUBDOMAIN=True, PUBLIC_DOMAIN='public.readthedocs.org', SERVE_PUBLIC_DOCS=True
+ USE_SUBDOMAIN=True, PUBLIC_DOMAIN='public.readthedocs.org', SERVE_PUBLIC_DOCS=True,
)
class RedirectSingleVersionTests(TestCase):
@@ -17,16 +16,24 @@ def setUp(self):
def test_docs_url_generation(self):
with override_settings(USE_SUBDOMAIN=False):
- self.assertEqual(self.pip.get_docs_url(),
- 'http://readthedocs.org/docs/pip/')
+ self.assertEqual(
+ self.pip.get_docs_url(),
+ 'http://readthedocs.org/docs/pip/',
+ )
with override_settings(USE_SUBDOMAIN=True):
- self.assertEqual(self.pip.get_docs_url(),
- 'http://pip.public.readthedocs.org/')
+ self.assertEqual(
+ self.pip.get_docs_url(),
+ 'http://pip.public.readthedocs.org/',
+ )
self.pip.single_version = False
with override_settings(USE_SUBDOMAIN=False):
- self.assertEqual(self.pip.get_docs_url(),
- 'http://readthedocs.org/docs/pip/en/latest/')
+ self.assertEqual(
+ self.pip.get_docs_url(),
+ 'http://readthedocs.org/docs/pip/en/latest/',
+ )
with override_settings(USE_SUBDOMAIN=True):
- self.assertEqual(self.pip.get_docs_url(),
- 'http://pip.public.readthedocs.org/en/latest/')
+ self.assertEqual(
+ self.pip.get_docs_url(),
+ 'http://pip.public.readthedocs.org/en/latest/',
+ )
diff --git a/readthedocs/rtd_tests/tests/test_subprojects.py b/readthedocs/rtd_tests/tests/test_subprojects.py
index 081bd89a0bb..255325d7a2f 100644
--- a/readthedocs/rtd_tests/tests/test_subprojects.py
+++ b/readthedocs/rtd_tests/tests/test_subprojects.py
@@ -1,7 +1,6 @@
-from __future__ import absolute_import
-
-import mock
+# -*- coding: utf-8 -*-
import django_dynamic_fixture as fixture
+import mock
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
@@ -19,13 +18,13 @@ def test_empty_child(self):
form = ProjectRelationshipForm(
{},
project=project,
- user=user
+ user=user,
)
form.full_clean()
self.assertEqual(len(form.errors['child']), 1)
self.assertRegex(
form.errors['child'][0],
- r'This field is required.'
+ r'This field is required.',
)
def test_nonexistent_child(self):
@@ -35,13 +34,13 @@ def test_nonexistent_child(self):
form = ProjectRelationshipForm(
{'child': 9999},
project=project,
- user=user
+ user=user,
)
form.full_clean()
self.assertEqual(len(form.errors['child']), 1)
self.assertRegex(
form.errors['child'][0],
- r'Select a valid choice.'
+ r'Select a valid choice.',
)
def test_adding_subproject_fails_when_user_is_not_admin(self):
@@ -58,13 +57,13 @@ def test_adding_subproject_fails_when_user_is_not_admin(self):
form = ProjectRelationshipForm(
{'child': subproject.pk},
project=project,
- user=user
+ user=user,
)
form.full_clean()
self.assertEqual(len(form.errors['child']), 1)
self.assertRegex(
form.errors['child'][0],
- r'Select a valid choice.'
+ r'Select a valid choice.',
)
def test_adding_subproject_passes_when_user_is_admin(self):
@@ -82,14 +81,14 @@ def test_adding_subproject_passes_when_user_is_admin(self):
form = ProjectRelationshipForm(
{'child': subproject.pk},
project=project,
- user=user
+ user=user,
)
form.full_clean()
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(
[r.child for r in project.subprojects.all()],
- [subproject]
+ [subproject],
)
def test_subproject_form_cant_create_sub_sub_project(self):
@@ -98,7 +97,7 @@ def test_subproject_form_cant_create_sub_sub_project(self):
subproject = fixture.get(Project, users=[user])
subsubproject = fixture.get(Project, users=[user])
relation = fixture.get(
- ProjectRelationship, parent=project, child=subproject
+ ProjectRelationship, parent=project, child=subproject,
)
self.assertQuerysetEqual(
Project.objects.for_admin_user(user),
@@ -109,7 +108,7 @@ def test_subproject_form_cant_create_sub_sub_project(self):
form = ProjectRelationshipForm(
{'child': subsubproject.pk},
project=subproject,
- user=user
+ user=user,
)
# The subsubproject is valid here, as far as the child check is
# concerned, but the parent check should fail.
@@ -121,7 +120,7 @@ def test_subproject_form_cant_create_sub_sub_project(self):
self.assertEqual(len(form.errors['parent']), 1)
self.assertRegex(
form.errors['parent'][0],
- r'Subproject nesting is not supported'
+ r'Subproject nesting is not supported',
)
def test_excludes_existing_subprojects(self):
@@ -129,7 +128,7 @@ def test_excludes_existing_subprojects(self):
project = fixture.get(Project, users=[user])
subproject = fixture.get(Project, users=[user])
relation = fixture.get(
- ProjectRelationship, parent=project, child=subproject
+ ProjectRelationship, parent=project, child=subproject,
)
self.assertQuerysetEqual(
Project.objects.for_admin_user(user),
@@ -140,7 +139,7 @@ def test_excludes_existing_subprojects(self):
form = ProjectRelationshipForm(
{'child': subproject.pk},
project=project,
- user=user
+ user=user,
)
self.assertEqual(
[proj_id for (proj_id, __) in form.fields['child'].choices],
@@ -154,12 +153,12 @@ def test_exclude_self_project_as_subproject(self):
form = ProjectRelationshipForm(
{'child': project.pk},
project=project,
- user=user
+ user=user,
)
self.assertFalse(form.is_valid())
self.assertNotIn(
project.id,
- [proj_id for (proj_id, __) in form.fields['child'].choices]
+ [proj_id for (proj_id, __) in form.fields['child'].choices],
)
@@ -171,12 +170,16 @@ def setUp(self):
self.owner = create_user(username='owner', password='test')
self.tester = create_user(username='tester', password='test')
self.pip = fixture.get(Project, slug='pip', users=[self.owner], main_language_project=None)
- self.subproject = fixture.get(Project, slug='sub', language='ja',
- users=[ self.owner],
- main_language_project=None)
- self.translation = fixture.get(Project, slug='trans', language='ja',
- users=[ self.owner],
- main_language_project=None)
+ self.subproject = fixture.get(
+ Project, slug='sub', language='ja',
+ users=[self.owner],
+ main_language_project=None,
+ )
+ self.translation = fixture.get(
+ Project, slug='trans', language='ja',
+ users=[self.owner],
+ main_language_project=None,
+ )
self.pip.add_subproject(self.subproject)
self.pip.translations.add(self.translation)
@@ -188,13 +191,13 @@ def setUp(self):
@override_settings(
PRODUCTION_DOMAIN='readthedocs.org',
USE_SUBDOMAIN=False,
- )
+ )
def test_resolver_subproject_alias(self):
resp = self.client.get('/docs/pip/projects/sub_alias/')
self.assertEqual(resp.status_code, 302)
self.assertEqual(
resp._headers['location'][1],
- 'http://readthedocs.org/docs/pip/projects/sub_alias/ja/latest/'
+ 'http://readthedocs.org/docs/pip/projects/sub_alias/ja/latest/',
)
@override_settings(USE_SUBDOMAIN=True)
@@ -203,5 +206,5 @@ def test_resolver_subproject_subdomain_alias(self):
self.assertEqual(resp.status_code, 302)
self.assertEqual(
resp._headers['location'][1],
- 'http://pip.readthedocs.org/projects/sub_alias/ja/latest/'
+ 'http://pip.readthedocs.org/projects/sub_alias/ja/latest/',
)
diff --git a/readthedocs/rtd_tests/tests/test_sync_versions.py b/readthedocs/rtd_tests/tests/test_sync_versions.py
index 533ad424729..37339d2a0e5 100644
--- a/readthedocs/rtd_tests/tests/test_sync_versions.py
+++ b/readthedocs/rtd_tests/tests/test_sync_versions.py
@@ -1,17 +1,9 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals
-)
-
import json
from django.test import TestCase
from django.urls import reverse
-import pytest
from readthedocs.builds.constants import BRANCH, STABLE, TAG
from readthedocs.builds.models import Version
@@ -177,7 +169,7 @@ def test_delete_version(self):
}
self.assertTrue(
- Version.objects.filter(slug='0.8.3').exists()
+ Version.objects.filter(slug='0.8.3').exists(),
)
self.client.post(
@@ -188,7 +180,7 @@ def test_delete_version(self):
# There isn't a v0.8.3
self.assertFalse(
- Version.objects.filter(slug='0.8.3').exists()
+ Version.objects.filter(slug='0.8.3').exists(),
)
def test_machine_attr_when_user_define_stable_tag_and_delete_it(self):
@@ -213,7 +205,7 @@ def test_machine_attr_when_user_define_stable_tag_and_delete_it(self):
# 0.8.3 is the current stable
self.assertEqual(
version8.identifier,
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
@@ -247,7 +239,7 @@ def test_machine_attr_when_user_define_stable_tag_and_delete_it(self):
current_stable = self.pip.get_stable_version()
self.assertEqual(
'1abc2def3',
- current_stable.identifier
+ current_stable.identifier,
)
# Deleting the tag should return the RTD's stable
@@ -278,7 +270,7 @@ def test_machine_attr_when_user_define_stable_tag_and_delete_it(self):
current_stable = self.pip.get_stable_version()
self.assertEqual(
'0.8.3',
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
@@ -325,7 +317,7 @@ def test_machine_attr_when_user_define_stable_tag_and_delete_it_new_project(self
current_stable = self.pip.get_stable_version()
self.assertEqual(
'1abc2def3',
- current_stable.identifier
+ current_stable.identifier,
)
# User activates the stable version
@@ -360,7 +352,7 @@ def test_machine_attr_when_user_define_stable_tag_and_delete_it_new_project(self
current_stable = self.pip.get_stable_version()
self.assertEqual(
'0.8.3',
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
@@ -388,7 +380,7 @@ def test_machine_attr_when_user_define_stable_branch_and_delete_it(self):
# 0.8.3 is the current stable
self.assertEqual(
'0.8.3',
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
@@ -420,7 +412,7 @@ def test_machine_attr_when_user_define_stable_branch_and_delete_it(self):
current_stable = self.pip.get_stable_version()
self.assertEqual(
'origin/stable',
- current_stable.identifier
+ current_stable.identifier,
)
# Deleting the branch should return the RTD's stable
@@ -449,18 +441,16 @@ def test_machine_attr_when_user_define_stable_branch_and_delete_it(self):
current_stable = self.pip.get_stable_version()
self.assertEqual(
'origin/0.8.3',
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
def test_machine_attr_when_user_define_stable_branch_and_delete_it_new_project(self):
- """
- The user imports a new project with a branch named ``stable``,
- when syncing the versions, the RTD's ``stable`` is lost
- (set to machine=False) and doesn't update automatically anymore,
- when the branch is deleted on the user repository, the RTD's ``stable``
- is back (set to machine=True).
- """
+ """The user imports a new project with a branch named ``stable``, when
+ syncing the versions, the RTD's ``stable`` is lost (set to
+ machine=False) and doesn't update automatically anymore, when the branch
+ is deleted on the user repository, the RTD's ``stable`` is back (set to
+ machine=True)."""
# There isn't a stable version yet
self.pip.versions.exclude(slug='master').delete()
current_stable = self.pip.get_stable_version()
@@ -494,7 +484,7 @@ def test_machine_attr_when_user_define_stable_branch_and_delete_it_new_project(s
current_stable = self.pip.get_stable_version()
self.assertEqual(
'origin/stable',
- current_stable.identifier
+ current_stable.identifier,
)
# User activates the stable version
@@ -527,18 +517,16 @@ def test_machine_attr_when_user_define_stable_branch_and_delete_it_new_project(s
current_stable = self.pip.get_stable_version()
self.assertEqual(
'origin/0.8.3',
- current_stable.identifier
+ current_stable.identifier,
)
self.assertTrue(current_stable.machine)
def test_machine_attr_when_user_define_latest_tag_and_delete_it(self):
- """
- The user creates a tag named ``latest`` on an existing repo,
- when syncing the versions, the RTD's ``latest`` is lost
- (set to machine=False) and doesn't update automatically anymore,
- when the tag is deleted on the user repository, the RTD's ``latest``
- is back (set to machine=True).
- """
+ """The user creates a tag named ``latest`` on an existing repo, when
+ syncing the versions, the RTD's ``latest`` is lost (set to
+ machine=False) and doesn't update automatically anymore, when the tag is
+ deleted on the user repository, the RTD's ``latest`` is back (set to
+ machine=True)."""
version_post_data = {
'branches': [
{
@@ -566,7 +554,7 @@ def test_machine_attr_when_user_define_latest_tag_and_delete_it(self):
version_latest = self.pip.versions.get(slug='latest')
self.assertEqual(
'1abc2def3',
- version_latest.identifier
+ version_latest.identifier,
)
# Deleting the tag should return the RTD's latest
@@ -577,7 +565,7 @@ def test_machine_attr_when_user_define_latest_tag_and_delete_it(self):
'verbose_name': 'master',
},
],
- 'tags': []
+ 'tags': [],
}
resp = self.client.post(
@@ -591,18 +579,16 @@ def test_machine_attr_when_user_define_latest_tag_and_delete_it(self):
version_latest = self.pip.versions.get(slug='latest')
self.assertEqual(
'master',
- version_latest.identifier
+ version_latest.identifier,
)
self.assertTrue(version_latest.machine)
def test_machine_attr_when_user_define_latest_branch_and_delete_it(self):
- """
- The user creates a branch named ``latest`` on an existing repo,
- when syncing the versions, the RTD's ``latest`` is lost
- (set to machine=False) and doesn't update automatically anymore,
- when the branch is deleted on the user repository, the RTD's ``latest``
- is back (set to machine=True).
- """
+ """The user creates a branch named ``latest`` on an existing repo, when
+ syncing the versions, the RTD's ``latest`` is lost (set to
+ machine=False) and doesn't update automatically anymore, when the branch
+ is deleted on the user repository, the RTD's ``latest`` is back (set to
+ machine=True)."""
version_post_data = {
'branches': [
{
@@ -628,7 +614,7 @@ def test_machine_attr_when_user_define_latest_branch_and_delete_it(self):
version_latest = self.pip.versions.get(slug='latest')
self.assertEqual(
'origin/latest',
- version_latest.identifier
+ version_latest.identifier,
)
# Deleting the branch should return the RTD's latest
@@ -682,7 +668,7 @@ def test_deletes_version_with_same_identifier(self):
# We only have one version with an identifier `1234`
self.assertEqual(
self.pip.versions.filter(identifier='1234').count(),
- 1
+ 1,
)
# We add a new tag with the same identifier
@@ -715,7 +701,7 @@ def test_deletes_version_with_same_identifier(self):
# We have two versions with an identifier `1234`
self.assertEqual(
self.pip.versions.filter(identifier='1234').count(),
- 2
+ 2,
)
# We delete one version with identifier `1234`
@@ -744,7 +730,7 @@ def test_deletes_version_with_same_identifier(self):
# We have only one version with an identifier `1234`
self.assertEqual(
self.pip.versions.filter(identifier='1234').count(),
- 1
+ 1,
)
@@ -840,7 +826,7 @@ def test_invalid_version_numbers_are_not_stable(self):
'tags': [
{
'identifier': 'this.is.invalid',
- 'verbose_name': 'this.is.invalid'
+ 'verbose_name': 'this.is.invalid',
},
],
}
@@ -861,7 +847,7 @@ def test_invalid_version_numbers_are_not_stable(self):
},
{
'identifier': 'this.is.invalid',
- 'verbose_name': 'this.is.invalid'
+ 'verbose_name': 'this.is.invalid',
},
],
}
@@ -911,7 +897,7 @@ def test_update_stable_version(self):
'identifier': '1.0.0',
'verbose_name': '1.0.0',
},
- ]
+ ],
}
self.client.post(
@@ -1123,12 +1109,12 @@ def test_user_defined_stable_version_tag_with_tags(self):
self.assertTrue(version_stable.active)
self.assertEqual(
'1abc2def3',
- self.pip.get_stable_version().identifier
+ self.pip.get_stable_version().identifier,
)
# There aren't other stable slugs like stable_a
other_stable = self.pip.versions.filter(
- slug__startswith='stable_'
+ slug__startswith='stable_',
)
self.assertFalse(other_stable.exists())
@@ -1145,10 +1131,10 @@ def test_user_defined_stable_version_tag_with_tags(self):
self.assertTrue(version_stable.active)
self.assertEqual(
'1abc2def3',
- self.pip.get_stable_version().identifier
+ self.pip.get_stable_version().identifier,
)
other_stable = self.pip.versions.filter(
- slug__startswith='stable_'
+ slug__startswith='stable_',
)
self.assertFalse(other_stable.exists())
@@ -1211,11 +1197,11 @@ def test_user_defined_stable_version_branch_with_tags(self):
self.assertTrue(version_stable.active)
self.assertEqual(
'origin/stable',
- self.pip.get_stable_version().identifier
+ self.pip.get_stable_version().identifier,
)
# There aren't other stable slugs like stable_a
other_stable = self.pip.versions.filter(
- slug__startswith='stable_'
+ slug__startswith='stable_',
)
self.assertFalse(other_stable.exists())
@@ -1232,10 +1218,10 @@ def test_user_defined_stable_version_branch_with_tags(self):
self.assertTrue(version_stable.active)
self.assertEqual(
'origin/stable',
- self.pip.get_stable_version().identifier
+ self.pip.get_stable_version().identifier,
)
other_stable = self.pip.versions.filter(
- slug__startswith='stable_'
+ slug__startswith='stable_',
)
self.assertFalse(other_stable.exists())
@@ -1291,12 +1277,12 @@ def test_user_defined_latest_version_tag(self):
self.assertTrue(version_latest.active)
self.assertEqual(
'1abc2def3',
- version_latest.identifier
+ version_latest.identifier,
)
# There aren't other latest slugs like latest_a
other_latest = self.pip.versions.filter(
- slug__startswith='latest_'
+ slug__startswith='latest_',
)
self.assertFalse(other_latest.exists())
@@ -1313,10 +1299,10 @@ def test_user_defined_latest_version_tag(self):
self.assertTrue(version_latest.active)
self.assertEqual(
'1abc2def3',
- version_latest.identifier
+ version_latest.identifier,
)
other_latest = self.pip.versions.filter(
- slug__startswith='latest_'
+ slug__startswith='latest_',
)
self.assertFalse(other_latest.exists())
@@ -1348,12 +1334,12 @@ def test_user_defined_latest_version_branch(self):
self.assertTrue(version_latest.active)
self.assertEqual(
'origin/latest',
- version_latest.identifier
+ version_latest.identifier,
)
# There aren't other latest slugs like latest_a
other_latest = self.pip.versions.filter(
- slug__startswith='latest_'
+ slug__startswith='latest_',
)
self.assertFalse(other_latest.exists())
@@ -1370,9 +1356,9 @@ def test_user_defined_latest_version_branch(self):
self.assertTrue(version_latest.active)
self.assertEqual(
'origin/latest',
- version_latest.identifier
+ version_latest.identifier,
)
other_latest = self.pip.versions.filter(
- slug__startswith='latest_'
+ slug__startswith='latest_',
)
self.assertFalse(other_latest.exists())
diff --git a/readthedocs/rtd_tests/tests/test_urls.py b/readthedocs/rtd_tests/tests/test_urls.py
index d70e0be8b71..b01c9997648 100644
--- a/readthedocs/rtd_tests/tests/test_urls.py
+++ b/readthedocs/rtd_tests/tests/test_urls.py
@@ -1,7 +1,6 @@
-from __future__ import absolute_import
-
-from django.urls import reverse, NoReverseMatch
+# -*- coding: utf-8 -*-
from django.test import TestCase
+from django.urls import NoReverseMatch, reverse
class WipeUrlTests(TestCase):
@@ -48,7 +47,7 @@ class TestVersionURLs(TestCase):
def test_version_url_with_caps(self):
url = reverse(
'project_download_media',
- kwargs={'type_': 'pdf', 'version_slug': u'1.4.X', 'project_slug': u'django'}
+ kwargs={'type_': 'pdf', 'version_slug': '1.4.X', 'project_slug': 'django'},
)
self.assertTrue(url)
@@ -58,18 +57,18 @@ class TestProfileDetailURLs(TestCase):
def test_profile_detail_url(self):
url = reverse(
'profiles_profile_detail',
- kwargs={'username': 'foo+bar'}
- )
+ kwargs={'username': 'foo+bar'},
+ )
self.assertEqual(url, '/profiles/foo+bar/')
url = reverse(
'profiles_profile_detail',
- kwargs={'username': 'abc+def@ghi.jkl'}
- )
+ kwargs={'username': 'abc+def@ghi.jkl'},
+ )
self.assertEqual(url, '/profiles/abc+def@ghi.jkl/')
url = reverse(
'profiles_profile_detail',
- kwargs={'username': 'abc-def+ghi'}
- )
+ kwargs={'username': 'abc-def+ghi'},
+ )
self.assertEqual(url, '/profiles/abc-def+ghi/')
diff --git a/readthedocs/rtd_tests/tests/test_version_commit_name.py b/readthedocs/rtd_tests/tests/test_version_commit_name.py
index e9ffd833ee7..bf181c3d5da 100644
--- a/readthedocs/rtd_tests/tests/test_version_commit_name.py
+++ b/readthedocs/rtd_tests/tests/test_version_commit_name.py
@@ -1,15 +1,10 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
from django.test import TestCase
-from django_dynamic_fixture import get
-from django_dynamic_fixture import new
+from django_dynamic_fixture import get, new
-from readthedocs.builds.constants import BRANCH
-from readthedocs.builds.constants import LATEST
-from readthedocs.builds.constants import STABLE
-from readthedocs.builds.constants import TAG
+from readthedocs.builds.constants import BRANCH, LATEST, STABLE, TAG
from readthedocs.builds.models import Version
-from readthedocs.projects.constants import REPO_TYPE_GIT
-from readthedocs.projects.constants import REPO_TYPE_HG
+from readthedocs.projects.constants import REPO_TYPE_GIT, REPO_TYPE_HG
from readthedocs.projects.models import Project
@@ -20,44 +15,60 @@ def test_branch_name_unicode_non_ascii(self):
self.assertEqual(version.identifier_friendly, unicode_name)
def test_branch_name_made_friendly_when_sha(self):
- commit_hash = u'3d92b728b7d7b842259ac2020c2fa389f13aff0d'
- version = new(Version, identifier=commit_hash,
- slug=STABLE, verbose_name=STABLE, type=TAG)
+ commit_hash = '3d92b728b7d7b842259ac2020c2fa389f13aff0d'
+ version = new(
+ Version, identifier=commit_hash,
+ slug=STABLE, verbose_name=STABLE, type=TAG,
+ )
# we shorten commit hashes to keep things readable
self.assertEqual(version.identifier_friendly, '3d92b728')
def test_branch_name(self):
- version = new(Version, identifier=u'release-2.5.x',
- slug=u'release-2.5.x', verbose_name=u'release-2.5.x',
- type=BRANCH)
+ version = new(
+ Version, identifier='release-2.5.x',
+ slug='release-2.5.x', verbose_name='release-2.5.x',
+ type=BRANCH,
+ )
self.assertEqual(version.commit_name, 'release-2.5.x')
def test_tag_name(self):
- version = new(Version, identifier=u'10f1b29a2bd2', slug=u'release-2.5.0',
- verbose_name=u'release-2.5.0', type=TAG)
- self.assertEqual(version.commit_name, u'release-2.5.0')
+ version = new(
+ Version, identifier='10f1b29a2bd2', slug='release-2.5.0',
+ verbose_name='release-2.5.0', type=TAG,
+ )
+ self.assertEqual(version.commit_name, 'release-2.5.0')
def test_branch_with_name_stable(self):
- version = new(Version, identifier=u'origin/stable', slug=STABLE,
- verbose_name=u'stable', type=BRANCH)
- self.assertEqual(version.commit_name, u'stable')
+ version = new(
+ Version, identifier='origin/stable', slug=STABLE,
+ verbose_name='stable', type=BRANCH,
+ )
+ self.assertEqual(version.commit_name, 'stable')
def test_stable_version_tag(self):
- version = new(Version,
- identifier=u'3d92b728b7d7b842259ac2020c2fa389f13aff0d',
- slug=STABLE, verbose_name=STABLE, type=TAG)
- self.assertEqual(version.commit_name,
- u'3d92b728b7d7b842259ac2020c2fa389f13aff0d')
+ version = new(
+ Version,
+ identifier='3d92b728b7d7b842259ac2020c2fa389f13aff0d',
+ slug=STABLE, verbose_name=STABLE, type=TAG,
+ )
+ self.assertEqual(
+ version.commit_name,
+ '3d92b728b7d7b842259ac2020c2fa389f13aff0d',
+ )
def test_hg_latest_branch(self):
hg_project = get(Project, repo_type=REPO_TYPE_HG)
- version = new(Version, identifier=u'default', slug=LATEST,
- verbose_name=LATEST, type=BRANCH, project=hg_project)
- self.assertEqual(version.commit_name, u'default')
+ version = new(
+ Version, identifier='default', slug=LATEST,
+ verbose_name=LATEST, type=BRANCH, project=hg_project,
+ )
+ self.assertEqual(version.commit_name, 'default')
def test_git_latest_branch(self):
git_project = get(Project, repo_type=REPO_TYPE_GIT)
- version = new(Version, project=git_project,
- identifier=u'origin/master', slug=LATEST,
- verbose_name=LATEST, type=BRANCH)
- self.assertEqual(version.commit_name, u'master')
+ version = new(
+ Version, project=git_project,
+ identifier='origin/master', slug=LATEST,
+ verbose_name=LATEST, type=BRANCH,
+ )
+ self.assertEqual(version.commit_name, 'master')
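The tests above pin down how `Version.commit_name` resolves for branches, tags, and the machine-created `latest`/`stable` versions. The sketch below is a rough reading of that behaviour reconstructed from the assertions alone; it is not the actual property on `readthedocs.builds.models.Version`, and the standalone function is made up for illustration.

def commit_name(slug, type_, identifier, verbose_name):
    # Reconstructed from the tests above -- illustrative only.
    if slug == 'stable' and type_ == 'tag':
        # The machine-created stable tag reports the commit it points at.
        return identifier
    if slug == 'latest':
        # 'latest' follows the repository's default branch; git identifiers
        # carry an 'origin/' prefix that gets stripped.
        if identifier.startswith('origin/'):
            return identifier[len('origin/'):]
        return identifier
    # Ordinary branches and tags answer with their human-readable name.
    return verbose_name

assert commit_name('latest', 'branch', 'origin/master', 'latest') == 'master'
assert commit_name('release-2.5.0', 'tag', '10f1b29a2bd2', 'release-2.5.0') == 'release-2.5.0'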
diff --git a/readthedocs/rtd_tests/tests/test_version_config.py b/readthedocs/rtd_tests/tests/test_version_config.py
index 82286ade4bf..2bcd61104de 100644
--- a/readthedocs/rtd_tests/tests/test_version_config.py
+++ b/readthedocs/rtd_tests/tests/test_version_config.py
@@ -1,5 +1,4 @@
-from __future__ import division, print_function, unicode_literals
-
+# -*- coding: utf-8 -*-
from django.test import TestCase
from django_dynamic_fixture import get
@@ -17,12 +16,12 @@ def test_get_correct_config(self):
build_old = Build.objects.create(
project=self.project,
version=self.version,
- config={'version': 1}
+ config={'version': 1},
)
build_new = Build.objects.create(
project=self.project,
version=self.version,
- config={'version': 2}
+ config={'version': 2},
)
build_new_error = Build.objects.create(
project=self.project,
@@ -43,7 +42,7 @@ def test_get_correct_config_when_same_config(self):
Build,
project=self.project,
version=self.version,
- config={}
+ config={},
)
build_old.config = {'version': 1}
build_old.save()
@@ -52,7 +51,7 @@ def test_get_correct_config_when_same_config(self):
Build,
project=self.project,
version=self.version,
- config={}
+ config={},
)
build_new.config = {'version': 1}
build_new.save()
diff --git a/readthedocs/rtd_tests/tests/test_version_slug.py b/readthedocs/rtd_tests/tests/test_version_slug.py
index 31cf2ca72fc..a0d4b4b28b8 100644
--- a/readthedocs/rtd_tests/tests/test_version_slug.py
+++ b/readthedocs/rtd_tests/tests/test_version_slug.py
@@ -1,10 +1,10 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
import re
+
from django.test import TestCase
from readthedocs.builds.models import Version
-from readthedocs.builds.version_slug import VersionSlugField
-from readthedocs.builds.version_slug import VERSION_SLUG_REGEX
+from readthedocs.builds.version_slug import VERSION_SLUG_REGEX, VersionSlugField
from readthedocs.projects.models import Project
@@ -27,7 +27,7 @@ def test_multiple_words(self):
class VersionSlugFieldTests(TestCase):
- fixtures = ["eric", "test_data"]
+ fixtures = ['eric', 'test_data']
def setUp(self):
self.pip = Project.objects.get(slug='pip')
@@ -35,58 +35,68 @@ def setUp(self):
def test_saving(self):
version = Version.objects.create(
verbose_name='1.0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, '1.0')
def test_normalizing(self):
version = Version.objects.create(
verbose_name='1%0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, '1-0')
def test_normalizing_slashes(self):
version = Version.objects.create(
verbose_name='releases/1.0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, 'releases-1.0')
def test_uppercase(self):
version = Version.objects.create(
verbose_name='SomeString-charclass',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, 'somestring-charclass')
def test_placeholder_as_name(self):
version = Version.objects.create(
verbose_name='-',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, 'unknown')
def test_multiple_empty_names(self):
version = Version.objects.create(
verbose_name='-',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, 'unknown')
version = Version.objects.create(
verbose_name='-./.-',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, 'unknown_a')
def test_uniqueness(self):
version = Version.objects.create(
verbose_name='1!0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, '1-0')
version = Version.objects.create(
verbose_name='1%0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, '1-0_a')
version = Version.objects.create(
verbose_name='1?0',
- project=self.pip)
+ project=self.pip,
+ )
self.assertEqual(version.slug, '1-0_b')
def test_uniquifying_suffix(self):
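The slug tests above describe two behaviours: normalization (lowercase, disallowed characters collapsed to `-`, degenerate names falling back to `unknown`) and uniqueness suffixes (`_a`, `_b`, ...). The snippet below is a minimal sketch of that contract; the real logic lives in `readthedocs/builds/version_slug.py` (`VersionSlugField`) and differs in detail.

import re
import string

def slugify_version(name, existing):
    # Lowercase, squash disallowed characters to '-', trim dangling dashes.
    slug = re.sub(r'[^a-z0-9._-]+', '-', name.lower()).strip('-')
    if not re.match(r'^[a-z0-9]', slug or ''):
        slug = 'unknown'  # fallback used for names like '-' or '-./.-'
    # Append '_a', '_b', ... until the slug is unique among existing slugs.
    candidate, suffixes = slug, iter(string.ascii_lowercase)
    while candidate in existing:
        candidate = '{}_{}'.format(slug, next(suffixes))
    return candidate

assert slugify_version('1%0', set()) == '1-0'
assert slugify_version('1?0', {'1-0', '1-0_a'}) == '1-0_b'
assert slugify_version('-./.-', {'unknown'}) == 'unknown_a'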
diff --git a/readthedocs/rtd_tests/tests/test_views.py b/readthedocs/rtd_tests/tests/test_views.py
index e68989e1483..21515ad886b 100644
--- a/readthedocs/rtd_tests/tests/test_views.py
+++ b/readthedocs/rtd_tests/tests/test_views.py
@@ -1,16 +1,10 @@
# -*- coding: utf-8 -*-
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
+from urllib.parse import urlsplit
import mock
from django.contrib.auth.models import User
-from django.urls import reverse
from django.test import TestCase
-from django.utils.six.moves.urllib.parse import urlsplit
+from django.urls import reverse
from django_dynamic_fixture import get, new
from readthedocs.builds.constants import LATEST
@@ -19,6 +13,7 @@
from readthedocs.projects.forms import UpdateProjectForm
from readthedocs.projects.models import ImportedFile, Project
+
class Testmaker(TestCase):
def setUp(self):
@@ -156,7 +151,7 @@ def test_project_translations(self):
def test_project_translations_delete(self):
response = self.client.get(
- '/dashboard/pip/translations/delete/a-translation/'
+ '/dashboard/pip/translations/delete/a-translation/',
)
self.assertRedirectToLogin(response)
@@ -219,13 +214,13 @@ def setUp(self):
def test_deny_delete_for_non_project_admins(self):
response = self.client.get(
- '/dashboard/my-mainproject/subprojects/delete/my-subproject/'
+ '/dashboard/my-mainproject/subprojects/delete/my-subproject/',
)
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.subproject in
- [r.child for r in self.project.subprojects.all()]
+ [r.child for r in self.project.subprojects.all()],
)
def test_admins_can_delete_subprojects(self):
@@ -244,7 +239,7 @@ def test_admins_can_delete_subprojects(self):
self.assertEqual(response.status_code, 405)
self.assertTrue(
self.subproject in
- [r.child for r in self.project.subprojects.all()]
+ [r.child for r in self.project.subprojects.all()],
)
# Test POST
response = self.client.post(
@@ -253,11 +248,11 @@ def test_admins_can_delete_subprojects(self):
self.assertEqual(response.status_code, 302)
self.assertTrue(
self.subproject not in
- [r.child for r in self.project.subprojects.all()]
+ [r.child for r in self.project.subprojects.all()],
)
def test_project_admins_can_delete_subprojects_that_they_are_not_admin_of(
- self
+ self,
):
self.project.users.add(self.user)
self.assertFalse(AdminPermission.is_admin(self.user, self.subproject))
@@ -268,7 +263,7 @@ def test_project_admins_can_delete_subprojects_that_they_are_not_admin_of(
self.assertEqual(response.status_code, 302)
self.assertTrue(
self.subproject not in
- [r.child for r in self.project.subprojects.all()]
+ [r.child for r in self.project.subprojects.all()],
)
diff --git a/readthedocs/rtd_tests/utils.py b/readthedocs/rtd_tests/utils.py
index 1908ef0ccd5..79f6eb0bbef 100644
--- a/readthedocs/rtd_tests/utils.py
+++ b/readthedocs/rtd_tests/utils.py
@@ -1,13 +1,6 @@
# -*- coding: utf-8 -*-
"""Utility functions for use in tests."""
-from __future__ import (
- absolute_import,
- division,
- print_function,
- unicode_literals,
-)
-
import logging
import subprocess
import textwrap
@@ -22,13 +15,12 @@
from readthedocs.doc_builder.base import restoring_chdir
+
log = logging.getLogger(__name__)
def get_readthedocs_app_path():
- """
- Return the absolute path of the ``readthedocs`` app.
- """
+ """Return the absolute path of the ``readthedocs`` app."""
try:
import readthedocs
@@ -42,7 +34,7 @@ def get_readthedocs_app_path():
def check_output(command, env=None):
output = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- env=env
+ env=env,
).communicate()[0]
log.info(output)
return output
@@ -61,7 +53,7 @@ def make_test_git():
# the repo
check_output(['git', 'checkout', '-b', 'submodule', 'master'], env=env)
add_git_submodule_without_cloning(
- directory, 'foobar', 'https://foobar.com/git'
+ directory, 'foobar', 'https://foobar.com/git',
)
check_output(['git', 'add', '.'], env=env)
check_output(['git', 'commit', '-m"Add submodule"'], env=env)
@@ -69,7 +61,7 @@ def make_test_git():
# Add an invalid submodule URL in the invalidsubmodule branch
check_output(['git', 'checkout', '-b', 'invalidsubmodule', 'master'], env=env)
add_git_submodule_without_cloning(
- directory, 'invalid', 'git@github.com:rtfd/readthedocs.org.git'
+ directory, 'invalid', 'git@github.com:rtfd/readthedocs.org.git',
)
check_output(['git', 'add', '.'], env=env)
check_output(['git', 'commit', '-m"Add invalid submodule"'], env=env)
@@ -130,11 +122,11 @@ def make_git_repo(directory, name='sample_repo'):
check_output(['git', 'init'] + [directory], env=env)
check_output(
['git', 'config', 'user.email', 'dev@readthedocs.org'],
- env=env
+ env=env,
)
check_output(
['git', 'config', 'user.name', 'Read the Docs'],
- env=env
+ env=env,
)
# Set up the actual repository
@@ -187,8 +179,10 @@ def delete_git_branch(directory, branch):
@restoring_chdir
-def create_git_submodule(directory, submodule,
- msg='Add realative submodule', branch='master'):
+def create_git_submodule(
+ directory, submodule,
+ msg='Add relative submodule', branch='master',
+):
env = environ.copy()
env['GIT_DIR'] = pjoin(directory, '.git')
chdir(directory)
diff --git a/readthedocs/search/indexes.py b/readthedocs/search/indexes.py
index 48e4baecc5e..05a2f759ac7 100644
--- a/readthedocs/search/indexes.py
+++ b/readthedocs/search/indexes.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
Search indexing classes to index into Elasticsearch.
@@ -12,20 +14,14 @@
TODO: Handle page removal case in Page.
-
"""
-from __future__ import absolute_import
-from builtins import object
-
+from django.conf import settings
from django.utils import timezone
-
from elasticsearch import Elasticsearch, exceptions
from elasticsearch.helpers import bulk_index
-from django.conf import settings
-
-class Index(object):
+class Index:
"""Base class to define some common methods across indexes."""
@@ -41,9 +37,8 @@ def get_settings(self, settings_override=None):
"""
Returns settings to be passed to ES create_index.
- If `settings_override` is provided, this will use `settings_override`
- to override the defaults defined here.
-
+ If `settings_override` is provided, this will use `settings_override` to
+ override the defaults defined here.
"""
default_settings = {
'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS,
@@ -67,7 +62,6 @@ def get_analysis(self):
define the stopwords for that language.
For all languages we've customized we're using the ICU plugin.
-
"""
analyzers = {}
filters = {}
@@ -92,15 +86,16 @@ def get_analysis(self):
}
def timestamped_index(self):
- return '{0}-{1}'.format(
- self._index, timezone.now().strftime('%Y%m%d%H%M%S'))
+ return '{}-{}'.format(
+ self._index,
+ timezone.now().strftime('%Y%m%d%H%M%S'),
+ )
def create_index(self, index=None):
"""
Creates index.
This uses `get_settings` and `get_mappings` to define the index.
-
"""
index = index or self._index
body = {
@@ -116,8 +111,14 @@ def put_mapping(self, index=None):
index = index or self._index
self.es.indices.put_mapping(self._type, self.get_mapping(), index)
- def bulk_index(self, data, index=None, chunk_size=500, parent=None,
- routing=None):
+ def bulk_index(
+ self,
+ data,
+ index=None,
+ chunk_size=500,
+ parent=None,
+ routing=None,
+ ):
"""
Given a list of documents, uses Elasticsearch bulk indexing.
@@ -125,7 +126,6 @@ def bulk_index(self, data, index=None, chunk_size=500, parent=None,
`chunk_size` defaults to the elasticsearch lib's default. Override per
your document size as needed.
-
"""
index = index or self._index
docs = []
@@ -152,7 +152,7 @@ def index_document(self, data, index=None, parent=None, routing=None):
'index': index or self._index,
'doc_type': self._type,
'body': doc,
- 'id': doc['id']
+ 'id': doc['id'],
}
if parent:
kwargs['parent'] = parent
@@ -202,8 +202,12 @@ def update_aliases(self, new_index, delete=True):
actions = []
if old_index:
- actions.append({'remove': {'index': old_index,
- 'alias': self._index}})
+ actions.append({
+ 'remove': {
+ 'index': old_index,
+ 'alias': self._index,
+ },
+ })
actions.append({'add': {'index': new_index, 'alias': self._index}})
self.es.indices.update_aliases(body={'actions': actions})
@@ -213,13 +217,14 @@ def update_aliases(self, new_index, delete=True):
self.es.indices.delete(index=old_index)
def search(self, body, **kwargs):
- return self.es.search(index=self._index, doc_type=self._type,
- body=body, **kwargs)
+ return self.es.search(
+ index=self._index, doc_type=self._type, body=body, **kwargs
+ )
class ProjectIndex(Index):
- """Search index configuration for Projects"""
+ """Search index configuration for Projects."""
_type = 'project'
@@ -231,8 +236,10 @@ def get_mapping(self):
'properties': {
'id': {'type': 'long'},
'name': {'type': 'string', 'analyzer': 'default_icu'},
- 'description': {'type': 'string', 'analyzer': 'default_icu'},
-
+ 'description': {
+ 'type': 'string',
+ 'analyzer': 'default_icu',
+ },
'slug': {'type': 'string', 'index': 'not_analyzed'},
'lang': {'type': 'string', 'index': 'not_analyzed'},
'tags': {'type': 'string', 'index': 'not_analyzed'},
@@ -250,8 +257,8 @@ def get_mapping(self):
'url': {'type': 'string', 'index': 'not_analyzed'},
# Add a weight field to enhance relevancy scoring.
'weight': {'type': 'float'},
- }
- }
+ },
+ },
}
return mapping
@@ -259,7 +266,16 @@ def get_mapping(self):
def extract_document(self, data):
doc = {}
- attrs = ('id', 'name', 'slug', 'description', 'lang', 'tags', 'author', 'url')
+ attrs = (
+ 'id',
+ 'name',
+ 'slug',
+ 'description',
+ 'lang',
+ 'tags',
+ 'author',
+ 'url',
+ )
for attr in attrs:
doc[attr] = data.get(attr, '')
@@ -271,7 +287,7 @@ def extract_document(self, data):
class PageIndex(Index):
- """Search index configuration for Pages"""
+ """Search index configuration for Pages."""
_type = 'page'
_parent = 'project'
@@ -291,14 +307,13 @@ def get_mapping(self):
'path': {'type': 'string', 'index': 'not_analyzed'},
'taxonomy': {'type': 'string', 'index': 'not_analyzed'},
'commit': {'type': 'string', 'index': 'not_analyzed'},
-
'title': {'type': 'string', 'analyzer': 'default_icu'},
'headers': {'type': 'string', 'analyzer': 'default_icu'},
'content': {'type': 'string', 'analyzer': 'default_icu'},
# Add a weight field to enhance relevancy scoring.
'weight': {'type': 'float'},
- }
- }
+ },
+ },
}
return mapping
@@ -306,8 +321,17 @@ def get_mapping(self):
def extract_document(self, data):
doc = {}
- attrs = ('id', 'project', 'title', 'headers', 'version', 'path',
- 'content', 'taxonomy', 'commit')
+ attrs = (
+ 'id',
+ 'project',
+ 'title',
+ 'headers',
+ 'version',
+ 'path',
+ 'content',
+ 'taxonomy',
+ 'commit',
+ )
for attr in attrs:
doc[attr] = data.get(attr, '')
@@ -319,7 +343,7 @@ def extract_document(self, data):
class SectionIndex(Index):
- """Search index configuration for Sections"""
+ """Search index configuration for Sections."""
_type = 'section'
_parent = 'page'
@@ -350,13 +374,16 @@ def get_mapping(self):
'blocks': {
'type': 'object',
'properties': {
- 'code': {'type': 'string', 'analyzer': 'default_icu'}
- }
+ 'code': {
+ 'type': 'string',
+ 'analyzer': 'default_icu',
+ },
+ },
},
# Add a weight field to enhance relevancy scoring.
'weight': {'type': 'float'},
- }
- }
+ },
+ },
}
return mapping
@@ -364,7 +391,16 @@ def get_mapping(self):
def extract_document(self, data):
doc = {}
- attrs = ('id', 'project', 'title', 'page_id', 'version', 'path', 'content', 'commit')
+ attrs = (
+ 'id',
+ 'project',
+ 'title',
+ 'page_id',
+ 'version',
+ 'path',
+ 'content',
+ 'commit',
+ )
for attr in attrs:
doc[attr] = data.get(attr, '')
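`Index` above already carries the pieces for a zero-downtime reindex: `timestamped_index()` names a fresh physical index, `create_index()` applies the settings and mapping, `bulk_index()` loads documents, and `update_aliases()` repoints the alias and drops the old index. The sketch below wires them together; it assumes a reachable Elasticsearch, documents shaped like the `project` mapping, and is illustrative orchestration rather than the project's actual reindex command.

from readthedocs.search.indexes import ProjectIndex

project_index = ProjectIndex()
new_index = project_index.timestamped_index()   # '<alias>-YYYYMMDDHHMMSS'
project_index.create_index(new_index)           # settings + mapping from the class above

documents = [
    # Hypothetical document with the fields the 'project' mapping declares.
    {'id': 1, 'name': 'pip', 'slug': 'pip', 'description': 'Example project',
     'lang': 'en', 'tags': [], 'author': 'dev', 'url': '/projects/pip/'},
]
project_index.bulk_index(documents, index=new_index)

# Atomically point the public alias at the new index and delete the old one.
project_index.update_aliases(new_index)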
diff --git a/readthedocs/search/lib.py b/readthedocs/search/lib.py
index 8500a829b03..007a95afdd9 100644
--- a/readthedocs/search/lib.py
+++ b/readthedocs/search/lib.py
@@ -1,55 +1,65 @@
+# -*- coding: utf-8 -*-
+
"""Utilities related to searching Elastic."""
-from __future__ import absolute_import
-from __future__ import print_function
from pprint import pprint
from django.conf import settings
-from .indexes import PageIndex, ProjectIndex, SectionIndex
-
from readthedocs.builds.constants import LATEST
from readthedocs.projects.models import Project
-from readthedocs.search.signals import (before_project_search,
- before_file_search,
- before_section_search)
+from readthedocs.search.signals import (
+ before_file_search,
+ before_project_search,
+ before_section_search,
+)
+
+from .indexes import PageIndex, ProjectIndex, SectionIndex
def search_project(request, query, language=None):
"""Search index for projects matching query."""
body = {
- "query": {
- "bool": {
- "should": [
- {"match": {"name": {"query": query, "boost": 10}}},
- {"match": {"description": {"query": query}}},
- ]
+ 'query': {
+ 'bool': {
+ 'should': [
+ {'match': {'name': {'query': query, 'boost': 10}}},
+ {'match': {'description': {'query': query}}},
+ ],
},
},
- "facets": {
- "language": {
- "terms": {"field": "lang"},
+ 'facets': {
+ 'language': {
+ 'terms': {'field': 'lang'},
},
},
- "highlight": {
- "fields": {
- "name": {},
- "description": {},
- }
+ 'highlight': {
+ 'fields': {
+ 'name': {},
+ 'description': {},
+ },
},
- "fields": ["name", "slug", "description", "lang", "url"],
- "size": 50 # TODO: Support pagination.
+ 'fields': ['name', 'slug', 'description', 'lang', 'url'],
+ 'size': 50, # TODO: Support pagination.
}
if language:
- body['facets']['language']['facet_filter'] = {"term": {"lang": language}}
- body['filter'] = {"term": {"lang": language}}
+ body['facets']['language']['facet_filter'] = {
+ 'term': {'lang': language},
+ }
+ body['filter'] = {'term': {'lang': language}}
before_project_search.send(request=request, sender=ProjectIndex, body=body)
return ProjectIndex().search(body)
-def search_file(request, query, project_slug=None, version_slug=LATEST, taxonomy=None):
+def search_file(
+ request,
+ query,
+ project_slug=None,
+ version_slug=LATEST,
+ taxonomy=None,
+):
"""
Search index for files matching query.
@@ -63,78 +73,90 @@ def search_file(request, query, project_slug=None, version_slug=LATEST, taxonomy
"""
kwargs = {}
body = {
- "query": {
- "bool": {
- "should": [
- {"match_phrase": {
- "title": {
- "query": query,
- "boost": 10,
- "slop": 2,
+ 'query': {
+ 'bool': {
+ 'should': [
+ {
+ 'match_phrase': {
+ 'title': {
+ 'query': query,
+ 'boost': 10,
+ 'slop': 2,
+ },
},
- }},
- {"match_phrase": {
- "headers": {
- "query": query,
- "boost": 5,
- "slop": 3,
+ },
+ {
+ 'match_phrase': {
+ 'headers': {
+ 'query': query,
+ 'boost': 5,
+ 'slop': 3,
+ },
},
- }},
- {"match_phrase": {
- "content": {
- "query": query,
- "slop": 5,
+ },
+ {
+ 'match_phrase': {
+ 'content': {
+ 'query': query,
+ 'slop': 5,
+ },
},
- }},
- ]
- }
+ },
+ ],
+ },
},
- "facets": {
- "taxonomy": {
- "terms": {"field": "taxonomy"},
+ 'facets': {
+ 'taxonomy': {
+ 'terms': {'field': 'taxonomy'},
},
- "project": {
- "terms": {"field": "project"},
+ 'project': {
+ 'terms': {'field': 'project'},
},
- "version": {
- "terms": {"field": "version"},
+ 'version': {
+ 'terms': {'field': 'version'},
},
},
- "highlight": {
- "fields": {
- "title": {},
- "headers": {},
- "content": {},
- }
+ 'highlight': {
+ 'fields': {
+ 'title': {},
+ 'headers': {},
+ 'content': {},
+ },
},
- "fields": ["title", "project", "version", "path"],
- "size": 50 # TODO: Support pagination.
+ 'fields': ['title', 'project', 'version', 'path'],
+ 'size': 50, # TODO: Support pagination.
}
if project_slug or version_slug or taxonomy:
- final_filter = {"and": []}
+ final_filter = {'and': []}
if project_slug:
try:
- project = (Project.objects
- .api(request.user)
- .get(slug=project_slug))
+ project = (
+ Project.objects.api(request.user).get(slug=project_slug)
+ )
project_slugs = [project.slug]
# We need to use the obtuse syntax here because the manager
# doesn't pass along to ProjectRelationships
- project_slugs.extend(s.slug for s
- in Project.objects.public(
- request.user).filter(
- superprojects__parent__slug=project.slug))
- final_filter['and'].append({"terms": {"project": project_slugs}})
+ project_slugs.extend(
+ s.slug for s in Project.objects.public(
+ request.user,
+ ).filter(
+ superprojects__parent__slug=project.slug,
+ )
+ )
+ final_filter['and'].append({
+ 'terms': {'project': project_slugs},
+ })
# Add routing to optimize search by hitting the right shard.
# This purposely doesn't apply routing if the project has more
# than one parent project.
if project.superprojects.exists():
if project.superprojects.count() == 1:
- kwargs['routing'] = (project.superprojects.first()
- .parent.slug)
+ kwargs['routing'] = (
+ project.superprojects.first().parent.slug
+ )
else:
kwargs['routing'] = project_slug
except Project.DoesNotExist:
@@ -152,18 +174,23 @@ def search_file(request, query, project_slug=None, version_slug=LATEST, taxonomy
body['facets']['taxonomy']['facet_filter'] = final_filter
if settings.DEBUG:
- print("Before Signal")
+ print('Before Signal')
pprint(body)
before_file_search.send(request=request, sender=PageIndex, body=body)
if settings.DEBUG:
- print("After Signal")
+ print('After Signal')
pprint(body)
return PageIndex().search(body, **kwargs)
-def search_section(request, query, project_slug=None, version_slug=LATEST,
- path=None):
+def search_section(
+ request,
+ query,
+ project_slug=None,
+ version_slug=LATEST,
+ path=None,
+):
"""
Search for a section of content.
@@ -179,70 +206,74 @@ def search_section(request, query, project_slug=None, version_slug=LATEST,
"""
kwargs = {}
body = {
- "query": {
- "bool": {
- "should": [
- {"match_phrase": {
- "title": {
- "query": query,
- "boost": 10,
- "slop": 2,
+ 'query': {
+ 'bool': {
+ 'should': [
+ {
+ 'match_phrase': {
+ 'title': {
+ 'query': query,
+ 'boost': 10,
+ 'slop': 2,
+ },
},
- }},
- {"match_phrase": {
- "content": {
- "query": query,
- "slop": 5,
+ },
+ {
+ 'match_phrase': {
+ 'content': {
+ 'query': query,
+ 'slop': 5,
+ },
},
- }},
- ]
- }
+ },
+ ],
+ },
},
- "facets": {
- "project": {
- "terms": {"field": "project"},
- "facet_filter": {
- "term": {"version": version_slug},
- }
+ 'facets': {
+ 'project': {
+ 'terms': {'field': 'project'},
+ 'facet_filter': {
+ 'term': {'version': version_slug},
+ },
},
},
- "highlight": {
- "fields": {
- "title": {},
- "content": {},
- }
+ 'highlight': {
+ 'fields': {
+ 'title': {},
+ 'content': {},
+ },
},
- "fields": ["title", "project", "version", "path", "page_id", "content"],
- "size": 10 # TODO: Support pagination.
+ 'fields': ['title', 'project', 'version', 'path', 'page_id', 'content'],
+ 'size': 10, # TODO: Support pagination.
}
if project_slug:
body['filter'] = {
- "and": [
- {"term": {"project": project_slug}},
- {"term": {"version": version_slug}},
- ]
+ 'and': [
+ {'term': {'project': project_slug}},
+ {'term': {'version': version_slug}},
+ ],
}
body['facets']['path'] = {
- "terms": {"field": "path"},
- "facet_filter": {
- "term": {"project": project_slug},
- }
+ 'terms': {'field': 'path'},
+ 'facet_filter': {
+ 'term': {'project': project_slug},
+ },
},
# Add routing to optimize search by hitting the right shard.
kwargs['routing'] = project_slug
if path:
body['filter'] = {
- "and": [
- {"term": {"path": path}},
- ]
+ 'and': [
+ {'term': {'path': path}},
+ ],
}
if path and not project_slug:
# Show facets when we only have a path
body['facets']['path'] = {
- "terms": {"field": "path"}
+ 'terms': {'field': 'path'},
}
before_section_search.send(request=request, sender=PageIndex, body=body)
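For reference, `search_project()` above wraps the whole flow: it builds the Elasticsearch 1.x body (boosted `match` on `name`, plain `match` on `description`, a language facet, highlighting), fires `before_project_search`, and runs the query through `ProjectIndex().search()`. A hedged usage sketch follows; it assumes a configured Django shell and a running Elasticsearch, and the exact response shape depends on the ES version, hence the defensive `.get()` calls.

from readthedocs.search.lib import search_project

# `request` is normally the incoming Django request; None is enough for a
# sketch because it is only forwarded to the before_project_search signal.
results = search_project(request=None, query='documentation', language='en')

for hit in results.get('hits', {}).get('hits', []):
    fields = hit.get('fields', {})
    # With ES 1.x 'fields', values may come back as single-element lists.
    print(fields.get('name'), fields.get('url'))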
diff --git a/readthedocs/search/parse_json.py b/readthedocs/search/parse_json.py
index 9b19a7e7cb3..194c55ff165 100644
--- a/readthedocs/search/parse_json.py
+++ b/readthedocs/search/parse_json.py
@@ -1,31 +1,36 @@
# -*- coding: utf-8 -*-
-"""Functions related to converting content into dict/JSON structures."""
-from __future__ import absolute_import
+"""Functions related to converting content into dict/JSON structures."""
-import logging
import codecs
import fnmatch
import json
+import logging
import os
-from builtins import next, range # pylint: disable=redefined-builtin
from pyquery import PyQuery
+
log = logging.getLogger(__name__)
def process_all_json_files(version, build_dir=True):
- """Return a list of pages to index"""
+ """Return a list of pages to index."""
if build_dir:
full_path = version.project.full_json_path(version.slug)
else:
full_path = version.project.get_production_media_path(
- type_='json', version_slug=version.slug, include_file=False)
+ type_='json',
+ version_slug=version.slug,
+ include_file=False,
+ )
html_files = []
for root, _, files in os.walk(full_path):
for filename in fnmatch.filter(files, '*.fjson'):
- if filename in ['search.fjson', 'genindex.fjson', 'py-modindex.fjson']:
+ if filename in [
+ 'search.fjson',
+ 'genindex.fjson',
+ 'py-modindex.fjson']:
continue
html_files.append(os.path.join(root, filename))
page_list = []
@@ -57,15 +62,15 @@ def generate_sections_from_pyquery(body):
h1_section = body('.section > h1')
if h1_section:
div = h1_section.parent()
- h1_title = h1_section.text().replace(u'¶', '').strip()
+ h1_title = h1_section.text().replace('¶', '').strip()
h1_id = div.attr('id')
- h1_content = ""
+ h1_content = ''
next_p = body('h1').next()
while next_p:
if next_p[0].tag == 'div' and 'class' in next_p[0].attrib:
if 'section' in next_p[0].attrib['class']:
break
- h1_content += "\n%s\n" % next_p.html()
+ h1_content += '\n%s\n' % next_p.html()
next_p = next_p.next()
if h1_content:
yield {
@@ -79,7 +84,7 @@ def generate_sections_from_pyquery(body):
for num in range(len(section_list)):
div = section_list.eq(num).parent()
header = section_list.eq(num)
- title = header.text().replace(u'¶', '').strip()
+ title = header.text().replace('¶', '').strip()
section_id = div.attr('id')
content = div.html()
yield {
@@ -108,7 +113,7 @@ def process_file(filename):
return None
if 'body' in data and data['body']:
body = PyQuery(data['body'])
- body_content = body.text().replace(u'¶', '')
+ body_content = body.text().replace('¶', '')
sections.extend(generate_sections_from_pyquery(body))
else:
log.info('Unable to index content for: %s', filename)
@@ -119,9 +124,13 @@ def process_file(filename):
else:
log.info('Unable to index title for: %s', filename)
- return {'headers': process_headers(data, filename),
- 'content': body_content, 'path': path,
- 'title': title, 'sections': sections}
+ return {
+ 'headers': process_headers(data, filename),
+ 'content': body_content,
+ 'path': path,
+ 'title': title,
+ 'sections': sections,
+ }
def recurse_while_none(element):
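`generate_sections_from_pyquery()` above walks a Sphinx HTML body: it emits the `h1` block first, then one entry per nested section heading. Below is a small hedged sketch of feeding it a hand-written fragment; the HTML is made up but follows the `.section > h<n>` layout the parser expects, and the module only needs `pyquery` to import.

from pyquery import PyQuery

from readthedocs.search.parse_json import generate_sections_from_pyquery

html = '''
<div class="section" id="installation">
  <h1>Installation¶</h1>
  <p>Install with pip.</p>
  <div class="section" id="from-source">
    <h2>From source¶</h2>
    <p>Clone the repository and run setup.py.</p>
  </div>
</div>
'''

# Each yielded dict describes one indexable section of the page.
for section in generate_sections_from_pyquery(PyQuery(html)):
    print(section)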
diff --git a/readthedocs/search/signals.py b/readthedocs/search/signals.py
index 6abdf64cce9..17cc3649155 100644
--- a/readthedocs/search/signals.py
+++ b/readthedocs/search/signals.py
@@ -1,7 +1,9 @@
+# -*- coding: utf-8 -*-
+
"""We define custom Django signals to trigger before executing searches."""
-from __future__ import absolute_import
import django.dispatch
-before_project_search = django.dispatch.Signal(providing_args=["body"])
-before_file_search = django.dispatch.Signal(providing_args=["body"])
-before_section_search = django.dispatch.Signal(providing_args=["body"])
+
+before_project_search = django.dispatch.Signal(providing_args=['body'])
+before_file_search = django.dispatch.Signal(providing_args=['body'])
+before_section_search = django.dispatch.Signal(providing_args=['body'])
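These signals let other apps rewrite the query before it is sent to Elasticsearch; `search_project()` and friends pass the mutable `body` dict along with the request. A minimal hedged sketch of a receiver (the size cap here is purely illustrative):

from readthedocs.search.signals import before_project_search

def limit_results(sender, body, **kwargs):
    # `body` is the ES query dict built in readthedocs.search.lib;
    # mutating it here changes what gets sent.
    body['size'] = 10

before_project_search.connect(limit_results)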
diff --git a/readthedocs/search/tests/conftest.py b/readthedocs/search/tests/conftest.py
index 59961f3a7e2..7b21c7c2137 100644
--- a/readthedocs/search/tests/conftest.py
+++ b/readthedocs/search/tests/conftest.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
import random
import string
from random import shuffle
@@ -6,8 +7,14 @@
from django_dynamic_fixture import G
from readthedocs.projects.models import Project
-from readthedocs.search.indexes import Index, ProjectIndex, PageIndex, SectionIndex
-from .dummy_data import DUMMY_PAGE_JSON, ALL_PROJECTS
+from readthedocs.search.indexes import (
+ Index,
+ PageIndex,
+ ProjectIndex,
+ SectionIndex,
+)
+
+from .dummy_data import ALL_PROJECTS, DUMMY_PAGE_JSON
@pytest.fixture(autouse=True)
diff --git a/readthedocs/search/tests/data/docs/story.json b/readthedocs/search/tests/data/docs/story.json
index 69226b65209..10c81a97832 100644
--- a/readthedocs/search/tests/data/docs/story.json
+++ b/readthedocs/search/tests/data/docs/story.json
@@ -29,4 +29,4 @@
}
],
"path": "open-source-philosophy"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/data/docs/wiping.json b/readthedocs/search/tests/data/docs/wiping.json
index a54889e05fa..bbdbc8860a8 100644
--- a/readthedocs/search/tests/data/docs/wiping.json
+++ b/readthedocs/search/tests/data/docs/wiping.json
@@ -12,4 +12,4 @@
}
],
"path": "guides/wipe-environment"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/data/kuma/docker.json b/readthedocs/search/tests/data/kuma/docker.json
index 3f86764073a..6e16f7e9784 100644
--- a/readthedocs/search/tests/data/kuma/docker.json
+++ b/readthedocs/search/tests/data/kuma/docker.json
@@ -22,4 +22,4 @@
}
],
"path": "docker"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/data/kuma/documentation.json b/readthedocs/search/tests/data/kuma/documentation.json
index 310a01d05c8..8c7b44a42e6 100644
--- a/readthedocs/search/tests/data/kuma/documentation.json
+++ b/readthedocs/search/tests/data/kuma/documentation.json
@@ -18,4 +18,4 @@
}
],
"path": "documentation"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/data/pipeline/installation.json b/readthedocs/search/tests/data/pipeline/installation.json
index 30fb78d1d78..22bba4f08fe 100644
--- a/readthedocs/search/tests/data/pipeline/installation.json
+++ b/readthedocs/search/tests/data/pipeline/installation.json
@@ -30,4 +30,4 @@
}
],
"path": "installation"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/data/pipeline/signals.json b/readthedocs/search/tests/data/pipeline/signals.json
index 3bf3a80537c..abed6187b3b 100644
--- a/readthedocs/search/tests/data/pipeline/signals.json
+++ b/readthedocs/search/tests/data/pipeline/signals.json
@@ -24,4 +24,4 @@
}
],
"path": "signals"
-}
\ No newline at end of file
+}
diff --git a/readthedocs/search/tests/dummy_data.py b/readthedocs/search/tests/dummy_data.py
index fbd4eed1f11..2e99d4e7711 100644
--- a/readthedocs/search/tests/dummy_data.py
+++ b/readthedocs/search/tests/dummy_data.py
@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
import json
import os
+
_DATA_FILES = {
'pipeline': ['installation.json', 'signals.json'],
'kuma': ['documentation.json', 'docker.json'],
@@ -14,7 +16,7 @@ def _get_dummy_json():
data = []
for file_name in value:
current_path = os.path.abspath(os.path.dirname(__file__))
- path = os.path.join(current_path, "data", key, file_name)
+ path = os.path.join(current_path, 'data', key, file_name)
with open(path) as f:
content = json.load(f)
data.append(content)
diff --git a/readthedocs/search/tests/test_views.py b/readthedocs/search/tests/test_views.py
index 07444a731fb..bfb7058fc05 100644
--- a/readthedocs/search/tests/test_views.py
+++ b/readthedocs/search/tests/test_views.py
@@ -14,7 +14,7 @@
@pytest.mark.django_db
@pytest.mark.search
-class TestElasticSearch(object):
+class TestElasticSearch:
url = reverse_lazy('search')
@@ -35,19 +35,23 @@ def elastic_index(self, mock_parse_json, all_projects, es_index):
self._reindex_elasticsearch(es_index=es_index)
def test_search_by_project_name(self, client, project):
- result, _ = self._get_search_result(url=self.url, client=client,
- search_params={'q': project.name})
+ result, _ = self._get_search_result(
+ url=self.url, client=client,
+ search_params={'q': project.name},
+ )
assert project.name.encode('utf-8') in result.text().encode('utf-8')
def test_search_project_show_languages(self, client, project, es_index):
- """Test that searching project should show all available languages"""
+ """Test that searching project should show all available languages."""
# Create a project in bn and add it as a translation
G(Project, language='bn', name=project.name)
self._reindex_elasticsearch(es_index=es_index)
- result, page = self._get_search_result(url=self.url, client=client,
- search_params={'q': project.name})
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params={'q': project.name},
+ )
content = page.find('.navigable .language-list')
# There should be 2 languages
@@ -55,14 +59,16 @@ def test_search_project_show_languages(self, client, project, es_index):
assert 'bn' in content.text()
def test_search_project_filter_language(self, client, project, es_index):
- """Test that searching project filtered according to language"""
+ """Test that searching project filtered according to language."""
# Create a project in bn and add it as a translation
translate = G(Project, language='bn', name=project.name)
self._reindex_elasticsearch(es_index=es_index)
search_params = {'q': project.name, 'language': 'bn'}
- result, page = self._get_search_result(url=self.url, client=client,
- search_params=search_params)
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params=search_params,
+ )
# There should be only 1 result
assert len(result) == 1
@@ -75,20 +81,27 @@ def test_search_project_filter_language(self, client, project, es_index):
@pytest.mark.parametrize('data_type', ['content', 'headers', 'title'])
@pytest.mark.parametrize('page_num', [0, 1])
def test_search_by_file_content(self, client, project, data_type, page_num):
- query = get_search_query_from_project_file(project_slug=project.slug, page_num=page_num,
- data_type=data_type)
-
- result, _ = self._get_search_result(url=self.url, client=client,
- search_params={'q': query, 'type': 'file'})
+ query = get_search_query_from_project_file(
+ project_slug=project.slug, page_num=page_num,
+ data_type=data_type,
+ )
+
+ result, _ = self._get_search_result(
+ url=self.url, client=client,
+ search_params={'q': query, 'type': 'file'},
+ )
assert len(result) == 1
def test_file_search_show_projects(self, client):
- """Test that search result page shows list of projects while searching for files"""
+ """Test that search result page shows list of projects while searching
+ for files."""
# `Github` word is present both in `kuma` and `pipeline` files
# so search with this phrase
- result, page = self._get_search_result(url=self.url, client=client,
- search_params={'q': 'GitHub', 'type': 'file'})
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params={'q': 'GitHub', 'type': 'file'},
+ )
# There should be 2 search result
assert len(result) == 2
@@ -102,13 +115,15 @@ def test_file_search_show_projects(self, client):
assert 'kuma' and 'pipeline' in text
def test_file_search_filter_by_project(self, client):
- """Test that search result are filtered according to project"""
+ """Test that search result are filtered according to project."""
# `Github` word is present both in `kuma` and `pipeline` files
# so search with this phrase but filter through `kuma` project
search_params = {'q': 'GitHub', 'type': 'file', 'project': 'kuma'}
- result, page = self._get_search_result(url=self.url, client=client,
- search_params=search_params)
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params=search_params,
+ )
# There should be 1 search result as we have filtered
assert len(result) == 1
@@ -122,11 +137,11 @@ def test_file_search_filter_by_project(self, client):
# as the query is present in both projects
content = page.find('.navigable .project-list')
if len(content) != 2:
- pytest.xfail("failing because currently all projects are not showing in project list")
+ pytest.xfail('failing because currently all projects are not showing in project list')
else:
assert 'kuma' and 'pipeline' in content.text()
- @pytest.mark.xfail(reason="Versions are not showing correctly! Fixme while rewrite!")
+ @pytest.mark.xfail(reason='Versions are not showing correctly! Fixme while rewrite!')
def test_file_search_show_versions(self, client, all_projects, es_index, settings):
# override the settings to index all versions
settings.INDEX_ONLY_LATEST = False
@@ -138,8 +153,10 @@ def test_file_search_show_versions(self, client, all_projects, es_index, setting
query = get_search_query_from_project_file(project_slug=project.slug)
- result, page = self._get_search_result(url=self.url, client=client,
- search_params={'q': query, 'type': 'file'})
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params={'q': query, 'type': 'file'},
+ )
# There should be only one result because by default
# only latest version result should be there
@@ -161,7 +178,7 @@ def test_file_search_show_versions(self, client, all_projects, es_index, setting
assert sorted(project_versions) == sorted(content_versions)
def test_file_search_subprojects(self, client, all_projects, es_index):
- """File search should return results from subprojects also"""
+ """File search should return results from subprojects also."""
project = all_projects[0]
subproject = all_projects[1]
# Add another project as subproject of the project
@@ -171,7 +188,9 @@ def test_file_search_subprojects(self, client, all_projects, es_index):
# Now search with subproject content but explicitly filter by the parent project
query = get_search_query_from_project_file(project_slug=subproject.slug)
search_params = {'q': query, 'type': 'file', 'project': project.slug}
- result, page = self._get_search_result(url=self.url, client=client,
- search_params=search_params)
+ result, page = self._get_search_result(
+ url=self.url, client=client,
+ search_params=search_params,
+ )
assert len(result) == 1
diff --git a/readthedocs/search/tests/utils.py b/readthedocs/search/tests/utils.py
index a48ea83dd74..80bf0fadff6 100644
--- a/readthedocs/search/tests/utils.py
+++ b/readthedocs/search/tests/utils.py
@@ -1,9 +1,12 @@
+# -*- coding: utf-8 -*-
from readthedocs.search.tests.dummy_data import DUMMY_PAGE_JSON
def get_search_query_from_project_file(project_slug, page_num=0, data_type='title'):
- """Return search query from the project's page file.
- Query is generated from the value of `data_type`
+ """
+ Return search query from the project's page file.
+
+ Query is generated from the value of `data_type`
"""
all_pages = DUMMY_PAGE_JSON[project_slug]
diff --git a/readthedocs/search/utils.py b/readthedocs/search/utils.py
index a742a341912..85c3c5735a3 100644
--- a/readthedocs/search/utils.py
+++ b/readthedocs/search/utils.py
@@ -1,16 +1,14 @@
# -*- coding: utf-8 -*-
-"""Utilities related to reading and generating indexable search content."""
-from __future__ import absolute_import
+"""Utilities related to reading and generating indexable search content."""
-import os
-import fnmatch
-import re
import codecs
-import logging
+import fnmatch
import json
+import logging
+import os
+import re
-from builtins import next, range
from pyquery import PyQuery
@@ -23,7 +21,10 @@ def process_mkdocs_json(version, build_dir=True):
full_path = version.project.full_json_path(version.slug)
else:
full_path = version.project.get_production_media_path(
- type_='json', version_slug=version.slug, include_file=False)
+ type_='json',
+ version_slug=version.slug,
+ include_file=False,
+ )
html_files = []
for root, _, files in os.walk(full_path):
@@ -35,8 +36,14 @@ def process_mkdocs_json(version, build_dir=True):
continue
relative_path = parse_path_from_file(file_path=filename)
html = parse_content_from_file(file_path=filename)
- headers = parse_headers_from_file(documentation_type='mkdocs', file_path=filename)
- sections = parse_sections_from_file(documentation_type='mkdocs', file_path=filename)
+ headers = parse_headers_from_file(
+ documentation_type='mkdocs',
+ file_path=filename,
+ )
+ sections = parse_sections_from_file(
+ documentation_type='mkdocs',
+ file_path=filename,
+ )
try:
title = sections[0]['title']
except IndexError:
@@ -61,7 +68,7 @@ def valid_mkdocs_json(file_path):
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
- except IOError as e:
+ except IOError:
log.warning(
'(Search Index) Unable to index file: %s',
file_path,
@@ -74,7 +81,10 @@ def valid_mkdocs_json(file_path):
page_json = json.loads(content)
for to_check in ['url', 'content']:
if to_check not in page_json:
- log.warning('(Search Index) Unable to index file: %s error: Invalid JSON', file_path)
+ log.warning(
+ '(Search Index) Unable to index file: %s error: Invalid JSON',
+ file_path,
+ )
return None
return True
@@ -85,7 +95,7 @@ def parse_path_from_file(file_path):
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
- except IOError as e:
+ except IOError:
log.warning(
'(Search Index) Unable to index file: %s',
file_path,
@@ -114,7 +124,7 @@ def parse_content_from_file(file_path):
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
- except IOError as e:
+ except IOError:
log.info(
'(Search Index) Unable to index file: %s',
file_path,
@@ -128,7 +138,10 @@ def parse_content_from_file(file_path):
content = parse_content(page_content)
if not content:
- log.info('(Search Index) Unable to index file: %s, empty file', file_path)
+ log.info(
+ '(Search Index) Unable to index file: %s, empty file',
+ file_path,
+ )
else:
log.debug('(Search Index) %s length: %s', file_path, len(content))
return content
@@ -152,7 +165,7 @@ def parse_headers_from_file(documentation_type, file_path):
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
- except IOError as e:
+ except IOError:
log.info(
'(Search Index) Unable to index file: %s',
file_path,
@@ -183,7 +196,7 @@ def parse_sections_from_file(documentation_type, file_path):
try:
with codecs.open(file_path, encoding='utf-8', mode='r') as f:
content = f.read()
- except IOError as e:
+ except IOError:
log.info(
'(Search Index) Unable to index file: %s',
file_path,
@@ -207,15 +220,15 @@ def parse_sphinx_sections(content):
h1_section = body('.section > h1')
if h1_section:
div = h1_section.parent()
- h1_title = h1_section.text().replace(u'¶', '').strip()
+ h1_title = h1_section.text().replace('¶', '').strip()
h1_id = div.attr('id')
- h1_content = ""
+ h1_content = ''
next_p = next(body('h1')) # pylint: disable=stop-iteration-return
while next_p:
if next_p[0].tag == 'div' and 'class' in next_p[0].attrib:
if 'section' in next_p[0].attrib['class']:
break
- h1_content += "\n%s\n" % next_p.html()
+ h1_content += '\n%s\n' % next_p.html()
next_p = next(next_p) # pylint: disable=stop-iteration-return
if h1_content:
yield {
@@ -229,7 +242,7 @@ def parse_sphinx_sections(content):
for num in range(len(section_list)):
div = section_list.eq(num).parent()
header = section_list.eq(num)
- title = header.text().replace(u'¶', '').strip()
+ title = header.text().replace('¶', '').strip()
section_id = div.attr('id')
content = div.html()
yield {
@@ -252,14 +265,14 @@ def parse_mkdocs_sections(content):
h1 = body('h1')
h1_id = h1.attr('id')
h1_title = h1.text().strip()
- h1_content = ""
+ h1_content = ''
next_p = next(body('h1')) # pylint: disable=stop-iteration-return
while next_p:
if next_p[0].tag == 'h2':
break
h1_html = next_p.html()
if h1_html:
- h1_content += "\n%s\n" % h1_html
+ h1_content += '\n%s\n' % h1_html
next_p = next(next_p) # pylint: disable=stop-iteration-return
if h1_content:
yield {
@@ -274,14 +287,14 @@ def parse_mkdocs_sections(content):
h2 = section_list.eq(num)
h2_title = h2.text().strip()
section_id = h2.attr('id')
- h2_content = ""
+ h2_content = ''
next_p = next(body('h2')) # pylint: disable=stop-iteration-return
while next_p:
if next_p[0].tag == 'h2':
break
h2_html = next_p.html()
if h2_html:
- h2_content += "\n%s\n" % h2_html
+ h2_content += '\n%s\n' % h2_html
next_p = next(next_p) # pylint: disable=stop-iteration-return
if h2_content:
yield {
diff --git a/readthedocs/search/views.py b/readthedocs/search/views.py
index bac1969e80e..8615f183a0f 100644
--- a/readthedocs/search/views.py
+++ b/readthedocs/search/views.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
-"""Search views."""
-from __future__ import (
- absolute_import, division, print_function, unicode_literals)
+"""Search views."""
import collections
import logging
from pprint import pprint
@@ -13,8 +11,9 @@
from readthedocs.builds.constants import LATEST
from readthedocs.search import lib as search_lib
+
log = logging.getLogger(__name__)
-LOG_TEMPLATE = u'(Elastic Search) [{user}:{type}] [{project}:{version}:{language}] {msg}'
+LOG_TEMPLATE = '(Elastic Search) [{user}:{type}] [{project}:{version}:{language}] {msg}'
UserInput = collections.namedtuple(
'UserInput',
@@ -46,11 +45,18 @@ def elastic_search(request):
if user_input.query:
if user_input.type == 'project':
results = search_lib.search_project(
- request, user_input.query, language=user_input.language)
+ request,
+ user_input.query,
+ language=user_input.language,
+ )
elif user_input.type == 'file':
results = search_lib.search_file(
- request, user_input.query, project_slug=user_input.project,
- version_slug=user_input.version, taxonomy=user_input.taxonomy)
+ request,
+ user_input.query,
+ project_slug=user_input.project,
+ version_slug=user_input.version,
+ taxonomy=user_input.taxonomy,
+ )
if results:
# pre and post 1.0 compat
@@ -82,7 +88,8 @@ def elastic_search(request):
version=user_input.version or '',
language=user_input.language or '',
msg=user_input.query or '',
- ))
+ ),
+ )
template_vars = user_input._asdict()
template_vars.update({
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py
index a6a3e867978..205dbdd9989 100644
--- a/readthedocs/settings/base.py
+++ b/readthedocs/settings/base.py
@@ -43,7 +43,7 @@ class CommunityBaseSettings(Settings):
PUBLIC_DOMAIN = None
PUBLIC_DOMAIN_USES_HTTPS = False
USE_SUBDOMAIN = False
- PUBLIC_API_URL = 'https://{0}'.format(PRODUCTION_DOMAIN)
+ PUBLIC_API_URL = 'https://{}'.format(PRODUCTION_DOMAIN)
# Email
DEFAULT_FROM_EMAIL = 'no-reply@readthedocs.org'
diff --git a/readthedocs/settings/dev.py b/readthedocs/settings/dev.py
index 7fa4dafe959..8fd9860a0bd 100644
--- a/readthedocs/settings/dev.py
+++ b/readthedocs/settings/dev.py
@@ -50,7 +50,7 @@ def DATABASES(self): # noqa
@property
def LOGGING(self): # noqa - avoid pep8 N802
- logging = super(CommunityDevSettings, self).LOGGING
+ logging = super().LOGGING
logging['formatters']['default']['format'] = '[%(asctime)s] ' + self.LOG_FORMAT
# Allow Sphinx and other tools to create loggers
logging['disable_existing_loggers'] = False
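The `super().LOGGING` change above is the Python 3 spelling of the cooperative property override: the dev settings grab the parent class's computed logging dict, tweak one formatter, and hand it back. A self-contained sketch of the same pattern (the class names below are stand-ins, not the real settings classes):

class BaseSettings:
    LOG_FORMAT = '%(name)s: %(message)s'

    @property
    def LOGGING(self):  # noqa - mirrors the Django settings convention
        return {'formatters': {'default': {'format': self.LOG_FORMAT}}}

class DevSettings(BaseSettings):
    @property
    def LOGGING(self):  # noqa
        logging = super().LOGGING
        logging['formatters']['default']['format'] = '[%(asctime)s] ' + self.LOG_FORMAT
        return logging

print(DevSettings().LOGGING['formatters']['default']['format'])
# -> [%(asctime)s] %(name)s: %(message)s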
diff --git a/readthedocs/templates/account/email/email_confirmation_message.html b/readthedocs/templates/account/email/email_confirmation_message.html
index bc37aa55a8e..a74fa11dfc3 100644
--- a/readthedocs/templates/account/email/email_confirmation_message.html
+++ b/readthedocs/templates/account/email/email_confirmation_message.html
@@ -15,4 +15,3 @@
{% trans "If you did not sign up for an account with Read the Docs, you can disregard this email." %}
{% endblock %}
-
diff --git a/readthedocs/templates/core/badge_markup.html b/readthedocs/templates/core/badge_markup.html
index f739b43f49d..f662e2ed689 100644
--- a/readthedocs/templates/core/badge_markup.html
+++ b/readthedocs/templates/core/badge_markup.html
@@ -22,7 +22,7 @@
HTML
-
+
<a href='{{ site_url }}'>
<img src='{{ badge_url }}' alt='Documentation Status' />
</a>
diff --git a/readthedocs/templates/core/build_list_detailed.html b/readthedocs/templates/core/build_list_detailed.html
index 33b3b754a7a..ee5bb8c442d 100644
--- a/readthedocs/templates/core/build_list_detailed.html
+++ b/readthedocs/templates/core/build_list_detailed.html
@@ -13,4 +13,3 @@
{% empty %}
{% trans "No builds found" %}
{% endfor %}
-
diff --git a/readthedocs/templates/core/project_list.html b/readthedocs/templates/core/project_list.html
index 64018b714e4..642525ab02a 100644
--- a/readthedocs/templates/core/project_list.html
+++ b/readthedocs/templates/core/project_list.html
@@ -4,4 +4,3 @@
{{ project.name }}
{% endfor %}
-
diff --git a/readthedocs/templates/core/project_list_detailed.html b/readthedocs/templates/core/project_list_detailed.html
index 96e660b3b80..1eb24632650 100644
--- a/readthedocs/templates/core/project_list_detailed.html
+++ b/readthedocs/templates/core/project_list_detailed.html
@@ -40,7 +40,7 @@
{% for version in project.ordered_active_versions reversed %}
{{ version.slug }}
{% endfor %}
-
+
{% else %}
{% endblock %}
-
-
diff --git a/readthedocs/templates/projects/domain_form.html b/readthedocs/templates/projects/domain_form.html
index 307e089a39a..ed4688751a9 100644
--- a/readthedocs/templates/projects/domain_form.html
+++ b/readthedocs/templates/projects/domain_form.html
@@ -41,4 +41,3 @@
{% endblock %}
-
diff --git a/readthedocs/templates/projects/domain_list.html b/readthedocs/templates/projects/domain_list.html
index eab3abb587f..be4290e41ff 100644
--- a/readthedocs/templates/projects/domain_list.html
+++ b/readthedocs/templates/projects/domain_list.html
@@ -33,7 +33,7 @@ {% trans "Existing Domains" %}
{% endif %}
-
+
{% trans "Add new Domain" %}
{% endblock %}
-
diff --git a/readthedocs/templates/projects/project_analytics.html b/readthedocs/templates/projects/project_analytics.html
index 5ca9ab7d86c..bf682e90ec5 100644
--- a/readthedocs/templates/projects/project_analytics.html
+++ b/readthedocs/templates/projects/project_analytics.html
@@ -64,7 +64,7 @@ {% trans "Pages" %}
{% for page, count in page_list %}
-
- {{ page }}
+ {{ page }}
{{ count }}
({{ analytics.scaled_page|key:page }}%)
@@ -77,7 +77,7 @@ {% trans "Versions" %}
{% for version, count in version_list %}
-
- {{ version }}
+ {{ version }}
{{ count }}
({{ analytics.scaled_version|key:version }}%)
diff --git a/readthedocs/templates/projects/project_version_list.html b/readthedocs/templates/projects/project_version_list.html
index a83273fe5e0..f85f3be0de7 100644
--- a/readthedocs/templates/projects/project_version_list.html
+++ b/readthedocs/templates/projects/project_version_list.html
@@ -113,7 +113,7 @@ {% trans "Inactive Versions" %}
{% endblock inactive-versions %}
-
+
{% endfor %}
diff --git a/readthedocs/templates/search/elastic_search.html b/readthedocs/templates/search/elastic_search.html
index b14ad50b20f..387bf85aa33 100644
--- a/readthedocs/templates/search/elastic_search.html
+++ b/readthedocs/templates/search/elastic_search.html
@@ -68,7 +68,7 @@ {% trans 'Version' %}
{{ name }}
{% else %}
{{ name }}
- {% endif %}
+ {% endif %}
({{ count }})
@@ -86,7 +86,7 @@ {% trans 'Taxonomy' %}
{{ name }}
{% else %}
{{ name }}
- {% endif %}
+ {% endif %}
({{ count }})
@@ -96,7 +96,7 @@ {% trans 'Taxonomy' %}
{% endif %}
{% endif %}
-
+
{% block sponsor %}